diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..6cb928d --- /dev/null +++ b/.flake8 @@ -0,0 +1,24 @@ +[flake8] +# https://pep8.readthedocs.io/en/latest/intro.html#error-codes +ignore = + # whitespace before ‘:’ + E203 + # line too long (82 > 79 characters) + E501 + # line break before operator + W503 +# Max width of Github code review is 119 characters +max-line-length = 119 +max-complexity = 18 +exclude = + .tox, + .git, + */migrations/*, + */static/CACHE/*, + docs, + node_modules, + .idea, + .mypy_cache, + .pytest_cache, + *__init__.py, + venv, diff --git a/.github/DISCUSSION_TEMPLATE/questions.yml b/.github/DISCUSSION_TEMPLATE/questions.yml new file mode 100644 index 0000000..3caceb2 --- /dev/null +++ b/.github/DISCUSSION_TEMPLATE/questions.yml @@ -0,0 +1,82 @@ +labels: [question] +body: + - type: markdown + attributes: + value: | + Thanks for your interest in zppy-interfaces! Please follow the template below to ensure the development team and community can help you effectively. + + - type: checkboxes + id: checks + attributes: + label: Question criteria + description: Please confirm and check all the following options. + options: + - label: I added a descriptive title here. + required: true + - label: I searched the [zppy-interfaces GitHub Discussions](https://github.com/E3SM-Project/zppy-interfaces/discussions) to find a similar question and didn't find it. + required: true + - label: I searched the [zppy-interfaces documentation](https://e3sm-project.github.io/zppy-interfaces). + required: true + + - type: textarea + id: deadline + attributes: + label: What is the deadline? + description: | + How urgently do you need a response to this question? Is there a day you need a resolution by? Knowing these constraints helps zppy-interfaces developers properly priortize user questions. 
+ validations: + required: true + + - type: textarea + id: your-question + attributes: + label: Describe your question + description: | + Please help the community help you. The more specific you can be, the easier it will be to help. + validations: + required: true + + - type: textarea + id: possible-answers + attributes: + label: Are there are any possible answers you came across? + description: | + This will help others determine if you're on the right track. Include links to pages you've researched (e.g., software docs, Stack Overflow posts). + + - type: textarea + id: machine + attributes: + label: What machine were you running on? + description: | + List the machine(s) you encounter the issue on (e.g., Chrysalis, Compy, Perlmutter). + validations: + required: true + + - type: textarea + id: zi-version + attributes: + label: Environment + description: | + Paste your zppy-interfaces version here (e.g., `zppy-interfaces v0.0.1`). + validations: + required: true + + - type: textarea + id: zi-command + attributes: + label: What command did you run? + description: | + Copy the command causing the issue (e.g., `zi-global-time-series ...`). This will be automatically formatted into code, so no need for markdown backticks. + render: bash + validations: + required: true + + - type: textarea + id: stack-trace + attributes: + label: What stack trace are you encountering? + description: | + Copy a stack trace from one of your failing jobs. This will be automatically formatted into code, so no need for markdown backticks. 
+ render: bash + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000..7e7136f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,53 @@ +name: Bug Report +description: File a bug report to help us improve zppy-interfaces +title: "[Bug]: " +labels: ["semver: bug"] +assignees: [] +body: + - type: textarea + id: what-happened + attributes: + label: What happened? + description: | + Thanks for reporting a bug! Please describe what you were trying to get done. + Tell us what happened and what went wrong. + validations: + required: true + + - type: textarea + id: machine + attributes: + label: What machine were you running on? + description: | + List the machine(s) you encounter the issue on (e.g., Chrysalis, Compy, Perlmutter). + validations: + required: true + + - type: textarea + id: zi-version + attributes: + label: Environment + description: | + Paste your zppy-interfaces version here (e.g., `zppy-interfaces v0.0.1`). + validations: + required: true + + - type: textarea + id: zi-command + attributes: + label: What command did you run? + description: | + Copy the command causing the issue (e.g., `zi-global-time-series ...`). This will be automatically formatted into code, so no need for markdown backticks. + render: bash + validations: + required: true + + - type: textarea + id: stack-trace + attributes: + label: What stack trace are you encountering? + description: | + Copy a stack trace from one of your failing jobs. This will be automatically formatted into code, so no need for markdown backticks. 
+ render: bash + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..46bec7f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,10 @@ +blank_issues_enabled: false +contact_links: + - name: Questions (zppy-interfaces) + url: https://github.com/E3SM-Project/zppy-interfaces/discussions/categories/questions + about: | + Ask questions and discuss with other zppy-interfaces community members here. Please + browse the zppy-interfaces Discussions Forum or zppy-interfaces documentation first before asking a + question to make sure it is not already answered. If you can't find an + answer, please include a self-contained reproducible example with your + question if possible. Thanks! diff --git a/.github/ISSUE_TEMPLATE/documentation.yml b/.github/ISSUE_TEMPLATE/documentation.yml new file mode 100644 index 0000000..1eac8bf --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation.yml @@ -0,0 +1,15 @@ +name: Documentation Update +description: Update zppy-interfaces documentation +title: "[Doc]: " +labels: ["Documentation"] +assignees: [] +body: + - type: textarea + id: description + attributes: + label: Describe your documentation update + description: | + Concise description of why the documentation is being updated (e.g., missing content for new feature, typo) + If this is related to an issue or PR, please mention it. + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..3d5dfb9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,57 @@ +name: Feature Request +description: Suggest an idea for zppy-interfaces +title: "[Feature]: " +labels: [] +assignees: [] +body: + - type: dropdown + id: semver-level + attributes: + label: How will this affect the next version number? 
+ description: | + `zppy-interfaces` uses semantic versioning (https://semver.org/). Bug fixes and small improvements will increment the PATCH version. New features will increment the MINOR version. Incompatible API changes will increment the MAJOR version. The amount of work required to implement a request typically increases with each level. (For bug fixes, use the "Bug Report" template). + multiple: false + options: + - Small improvement (increment PATCH version) + - New feature (increment MINOR version) + - Incompatibile API change (increment MAJOR version) + default: 1 + validations: + required: true + + - type: textarea + id: description + attributes: + label: Is your feature request related to a problem? + description: | + Please do a quick search of existing issues to make sure that this has not been asked before. + Please provide a clear and concise description of what the problem is. E.g., I'm always frustrated when [...] + validations: + required: true + + - type: textarea + id: solution + attributes: + label: Describe the solution you'd like + description: | + A clear and concise description of what you want to happen. + validations: + required: false + + - type: textarea + id: alternatives + attributes: + label: Describe alternatives you've considered + description: | + A clear and concise description of any alternative solutions or features you've considered. + validations: + required: false + + - type: textarea + id: additional-context + attributes: + label: Additional context + description: | + Add any other context about the feature request here. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/other_issue.yml b/.github/ISSUE_TEMPLATE/other_issue.yml new file mode 100644 index 0000000..165ae64 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/other_issue.yml @@ -0,0 +1,24 @@ +name: Other Issue +description: Report an issue not covered by the other templates. 
+body: + - type: checkboxes + id: checks + attributes: + label: Request criteria + description: Please confirm and check all the following options. + options: + - label: I searched the [zppy-interfaces GitHub Discussions](https://github.com/E3SM-Project/zppy-interfaces/discussions) to find a similar question and didn't find it. + required: true + - label: I searched the [zppy-interfaces documentation](https://e3sm-project.github.io/zppy-interfaces). + required: true + - label: This issue does not match the other templates (i.e., it is not a bug report, documentation request, feature request, or a question.) + required: true + + - type: textarea + id: issue-description + attributes: + label: Issue description + description: | + Please describe the issue. + validations: + required: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..2bc3200 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,42 @@ +## Issue resolution +- Closes # + +Select one: This pull request is... +- [ ] a bug fix: increment the patch version +- [ ] a small improvement: increment the minor version +- [ ] an incompatible (non-backwards compatible) API change: increment the major version + +## 1. Does this do what we want it to do? + +Objectives: +- Objective 1 +- Objective 2 +- ... +- Objective n + +Required: +- [ ] Product Management: I have confirmed with the stakeholders that the objectives above are correct and complete. +- [ ] Testing: I have considered likely and/or severe edge cases and have included them in testing. + +## 2. Are the implementation details accurate & efficient? + +Required: +- [ ] Logic: I have visually inspected the entire pull request myself. +- [ ] Logic: I have left GitHub comments highlighting important pieces of code logic. I have had these code blocks reviewed by at least one other team member. + +If applicable: +- [ ] Dependencies: This pull request introduces a new dependency. 
I have discussed this requirement with at least one other team member. The dependency is noted in `zppy-interfaces/conda`, not just an `import` statement. + +## 3. Is this well documented? + +Required: +- [ ] Documentation: by looking at the docs, a new user could easily understand the functionality introduced by this pull request. + +## 4. Is this code clean? + +Required: +- [ ] Readability: The code is as simple as possible and well-commented, such that a new team member could understand what's happening. +- [ ] Pre-commit checks: All the pre-commits checks have passed. + +If applicable: +- [ ] Software architecture: I have discussed relevant trade-offs in design decisions with at least one other team member. It is unlikely that this pull request will increase tech debt. diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml new file mode 100644 index 0000000..1cb6587 --- /dev/null +++ b/.github/workflows/build_workflow.yml @@ -0,0 +1,168 @@ +name: CI/CD Build Workflow + +on: + push: + # When a branch is pushed to GitHub, run this workflow. + # This will show up as the checks to pass on a pull request. + branches: [main] + + pull_request: + # When a pull request is merged, run this workflow. + branches: [main] + + workflow_dispatch: + +# These are the 4 jobs that show up under "All checks have passed" on GitHub. +jobs: + # This job determines which jobs to skip. + # It outputs a boolean value of whether to skip a job or not. + # It uses a third party checker for duplicate actions. 
+ check-jobs-to-skip: + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + cancel_others: false + paths_ignore: '["**/README.md", "**/docs/**"]' + + pre-commit-hooks: + needs: check-jobs-to-skip + if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true' }} + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout Code Repository + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + # Run all pre-commit hooks on all the files. + # Getting only staged files can be tricky in case a new PR is opened + # since the action is run on a branch in detached head state. + # This is the equivalent of running "pre-commit run --all-files" locally. + # If you commit with the `--no-verify` flag, this check may fail. + - name: Install and Run Pre-commit + uses: pre-commit/action@v3.0.0 + + build: + needs: check-jobs-to-skip + if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true' }} + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + steps: + - uses: actions/checkout@v3 + + - name: Cache Conda + uses: actions/cache@v3 + env: + CACHE_NUMBER: 0 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ + hashFiles('conda/dev.yml') }} + + - name: Build Conda Environment + uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: zppy_interfaces_dev + miniforge-variant: Miniforge3 + miniforge-version: latest + environment-file: conda/dev.yml + channel-priority: strict + auto-update-conda: true + + - if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true' }} + name: Show Conda Environment Info + run: | + conda config --set anaconda_upload no + conda info + conda list + + - name: Install `zppy-interfaces` Package + run: pip install . 
+ + # Does not run the integration tests, which require server access + - name: Run Unit Tests + run: | + pytest tests/global_time_series/test_*.py + + # If the branch updates documentation, then the docs will need to be updated. + publish-docs: + if: ${{ github.event_name == 'push' }} + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + timeout-minutes: 5 + steps: + - uses: actions/checkout@v2 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Cache Conda + uses: actions/cache@v3 + env: + CACHE_NUMBER: 0 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ + hashFiles('conda/dev.yml') }} + + - name: Build Conda Environment + uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: zppy_interfaces_dev + miniforge-variant: Miniforge3 + miniforge-version: latest + environment-file: conda/dev.yml + channel-priority: strict + auto-update-conda: true + + - if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true' }} + name: Show Conda Environment Info + run: | + conda config --set anaconda_upload no + conda info + conda list + + - name: Install `zppy-interfaces` Package + run: pip install . + + # sphinx-multiversion allows for version docs. + - name: Build Sphinx Docs + run: | + cd docs + sphinx-multiversion source _build/html + + - name: Copy Docs and Commit + run: | + # gh-pages branch must already exist + git clone https://github.com/E3SM-Project/zppy-interfaces.git --branch gh-pages --single-branch gh-pages + + # Only replace main docs with latest changes. Docs for tags should be untouched. + cd gh-pages + rm -r _build/html/main + cp -r ../docs/_build/html/main _build/html/main + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + # The below command will fail if no changes were present, so we ignore it + git add . 
+ git commit -m "Update documentation" -a || true + + - name: Push Changes + uses: ad-m/github-push-action@master + with: + branch: gh-pages + directory: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} + force: true diff --git a/.github/workflows/release_workflow.yml b/.github/workflows/release_workflow.yml new file mode 100644 index 0000000..5e56833 --- /dev/null +++ b/.github/workflows/release_workflow.yml @@ -0,0 +1,84 @@ +name: CI/CD Release Workflow + +on: + # This is run on release, meaning it is run when these steps are followed: + # https://e3sm-project.github.io/zppy-interfaces/_build/html/main/dev_guide/release.html#releasing-on-github-production-releases + # That means this workflow is not run for release candidates. + release: + types: [published] + +jobs: + + # On a release, the docs should always be published. + publish-docs: + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + timeout-minutes: 5 + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Cache Conda + uses: actions/cache@v3 + env: + CACHE_NUMBER: 0 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ + hashFiles('conda/dev.yml') }} + + - name: Build Conda Environment + uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: zppy_interfaces_dev + miniforge-variant: Miniforge3 + miniforge-version: latest + environment-file: conda/dev.yml + channel-priority: strict + auto-update-conda: true + + - if: ${{ needs.check-jobs-to-skip.outputs.should_skip != 'true' }} + name: Show Conda Environment Info + run: | + conda config --set anaconda_upload no + conda info + conda list + + - name: Install `zppy-interfaces` Package + run: pip install . + + # sphinx-multiversion allows for version docs. 
+ - name: Build Sphinx Docs + run: | + cd docs + sphinx-multiversion source _build/html + + - name: Copy Docs and Commit + run: | + # gh-pages branch must already exist + git clone https://github.com/E3SM-Project/zppy-interfaces.git --branch gh-pages --single-branch gh-pages + cd gh-pages + + # Replace main docs to populate dropdown with latest tags + rm -r _build/html/main + + # Only copy docs for main and current tag + cp -r -n ../docs/_build/html _build/ + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + # The below command will fail if no changes were present, so we ignore it + git add . + git commit -m "Update documentation" -a || true + + - name: Push Changes + uses: ad-m/github-push-action@master + with: + branch: gh-pages + directory: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} + force: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d2cea22 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +build/ +dist/ +zppy_interfaces.egg-info/ +*.pyc +*~ +\#* +__pycache__/ + +# Sphinx documentation +docs/_build/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..90469d1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,46 @@ +exclude: "docs|node_modules|migrations|.git|.tox" +default_stages: [commit] +fail_fast: true + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + exclude: conda/meta.yaml + + # Can run individually with `pre-commit run black --all-files` + - repo: https://github.com/psf/black + rev: 24.10.0 + hooks: + - id: black + + # Can run individually with `pre-commit run isort --all-files` + - repo: https://github.com/PyCQA/isort + rev: 5.13.2 + hooks: + - id: isort + + # Can run individually with `pre-commit run flake8 --all-files` + # Need to use flake8 
GitHub mirror due to CentOS git issue with GitLab + # https://github.com/pre-commit/pre-commit/issues/1206 + - repo: https://github.com/pycqa/flake8 + rev: 7.1.1 + hooks: + - id: flake8 + args: ["--config=.flake8"] + additional_dependencies: [flake8-isort==6.1.1] + + # Can run individually with `pre-commit run mypy --all-files` + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.11.2 + hooks: + - id: mypy + args: ["--config=pyproject.toml"] + +# https://pre-commit.ci/#configuration +ci: + autofix_prs: false + autoupdate_schedule: monthly diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..3cf245f --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,14 @@ +{ + "[python]": { + // [comments, max line length, wrap line length] + // Black does not wrap comments. + "editor.rulers": [80, 88, 120], + "editor.wordWrap": "wordWrapColumn", + "editor.wordWrapColumn": 120, + "editor.defaultFormatter": "ms-python.python" + }, + "python.formatting.provider": "black", + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": ["--config=setup.cfg"], + "python.linting.mypyEnabled": true, +} diff --git a/conda/dev.yml b/conda/dev.yml new file mode 100644 index 0000000..57bb44f --- /dev/null +++ b/conda/dev.yml @@ -0,0 +1,43 @@ +# Conda development environment for testing local source code changes to zppy-interfaces before merging them to production (main branch). 
+name: zppy-interfaces-dev +channels: + - conda-forge + - defaults +dependencies: + # Build + # ======================= + - python >=3.9 + - pip + - setuptools >= 60 + # Base + # ================= + - matplotlib-base >=3.8.2,<3.10 + - mpas_tools >=0.21.0 + - netcdf4 + - numpy >=2.0,<3.0 + - xarray >=2023.02.0 + - xcdat >=0.7.3,<1.0 + # Testing + # ======================= + - pytest + - pytest-cov + # Documentation + # ======================= + - sphinx + - sphinx-multiversion + - sphinx_rtd_theme + # Quality Assurance Tools + # ======================= + # Run `pre-commit autoupdate` to get the latest pinned versions of 'rev' in + # `.pre-commit.config.yaml`, then update the pinned versions here. + - black=24.10.0 + - flake8=7.1.1 + - flake8-isort=6.1.0 + - isort=5.13.2 + - mypy=1.11.2 + - pre-commit >=3.0.0 + - types-PyYAML >=6.0.0 + # Developer Tools + # ======================= + - tbump=6.9.0 + - ipykernel diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..8e7b418 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,122 @@ +[project] +name = "zppy_interfaces" +dynamic = ["version"] +authors = [ + { name="Ryan Forsyth", email="forsyth2@llnl.gov" } +] +description = "A package for providing extra functionality on top of external packages" +license = {file = "LICENSE"} +readme = "README.md" +requires-python = ">=3.9" +classifiers = [ + # these are only for searching/browsing projects on PyPI + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", +] + +dependencies = [ + "matplotlib >=3.8.2,<3.10", + "netcdf4", + "numpy >=2.0,<3.0", + "xarray >=2023.02.0", + "xcdat >=0.7.3,<1.0", +] + +[project.optional-dependencies] + +testing = [ + "pytest", + "pytest-cov", +] + +docs = [ + "sphinx", + 
"sphinx_rtd_theme", + "sphinx-multiversion", +] + +qa = [ + "black==24.10.1", + "flake8==7.1.1", + "flake8-isort==6.1.0", + "isort==5.13.2", + "mypy==1.11.2", + "pre-commit >=3.0.0", + "types-PyYAML >=6.0.0", +] + +dev = [ + "tbump==6.9.0", + "ipykernel", +] + +[tool.black] +line-length = 88 +target-version = ['py39'] +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.mypy_cache + | _build + | conda + | docs + )/ +''' + +[tool.isort] +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +line_length = 88 + +[tool.pycodestyle] +max-line-length = 119 +exclude = ''' +/( + \.tox + | \.git + | */migrations/* + | */static/CACHE/* + | docs + | node_modules + | \.idea + | \.mypy_cache + | \.pytest_cache + | *__init__.py + | venv + )/ +''' + +[tool.mypy] +python_version = 3.9 +check_untyped_defs = true +ignore_missing_imports = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_unused_configs = true + +[build-system] +requires = ["setuptools>=60"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +exclude = [ "conda*", "docs*", "tests*"] + +[tool.setuptools.dynamic] +version = { attr = "zppy_interfaces.version.__version__" } + +# evolution of options.entry-points +[project.scripts] +zi-global-time-series = "zppy_interfaces.global_time_series.__main__:main" + +[project.urls] +Documentation = "https://docs.e3sm.org/zppy-interfaces" +"Bug Tracker" = "https://github.com/E3SM-Project/zppy-interfaces/issues" diff --git a/tests/global_time_series/test_global_time_series.py b/tests/global_time_series/test_global_time_series.py new file mode 100644 index 0000000..5c80918 --- /dev/null +++ b/tests/global_time_series/test_global_time_series.py @@ -0,0 +1,279 @@ +from typing import Any, Dict, List + +import pytest + +from zppy_interfaces.global_time_series.coupled_global import ( + Metric, + Variable, + construct_generic_variables, + get_data_dir, + get_exps, + get_vars_original, + 
get_ylim, +) +from zppy_interfaces.global_time_series.utils import ( + Parameters, + get_region, + param_get_list, +) + +# Run tests with `pytest tests/global_time_series` + + +# Helper function +def get_var_names(vars: List[Variable]): + return list(map(lambda v: v.variable_name, vars)) + + +def test_param_get_list(): + assert param_get_list("None") == [] + + assert param_get_list("a") == ["a"] + assert param_get_list("a,b,c") == ["a", "b", "c"] + + assert param_get_list("") == [""] + assert param_get_list("a,") == ["a", ""] + assert param_get_list("a,b,c,") == ["a", "b", "c", ""] + + +def test_get_region(): + assert get_region("glb") == "glb" + assert get_region("global") == "glb" + assert get_region("GLB") == "glb" + assert get_region("Global") == "glb" + + assert get_region("n") == "n" + assert get_region("north") == "n" + assert get_region("northern") == "n" + assert get_region("N") == "n" + assert get_region("North") == "n" + assert get_region("Northern") == "n" + + assert get_region("s") == "s" + assert get_region("south") == "s" + assert get_region("southern") == "s" + assert get_region("S") == "s" + assert get_region("South") == "s" + assert get_region("Southern") == "s" + + with pytest.raises(ValueError): + get_region("not-a-region") + + +def test_Parameters_and_related_functions(): + # Consider the following parameters given by a user. 
+ args: Dict[str, str] = { + "use_ocn": "True", + "input": "/lcrc/group/e3sm2/ac.wlin/E3SMv3/v3.LR.historical_0051", + "input_subdir": "archive/atm/hist", + "moc_file": "mocTimeSeries_1985-1995.nc", + "case_dir": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051", + "experiment_name": "v3.LR.historical_0051", + "figstr": "v3.LR.historical_0051", + "color": "Blue", + "ts_num_years": "5", + "plots_original": "None", + "atmosphere_only": "False", + "plots_atm": "TREFHT", + "plots_ice": "None", + "plots_lnd": "FSH,RH2M,LAISHA,LAISUN,QINTR,QOVER,QRUNOFF,QSOIL,QVEGE,QVEGT,SOILWATER_10CM,TSA,H2OSNO,TOTLITC,CWDC,SOIL1C,SOIL2C,SOIL3C,SOIL4C,WOOD_HARVESTC,TOTVEGC,NBP,GPP,AR,HR", + "plots_ocn": "None", + "nrows": "1", + "ncols": "1", + "results_dir": "results", + "regions": "glb,n,s", + "start_yr": "1985", + "end_yr": "1989", + } + # Then: + parameters: Parameters = Parameters(args) + assert ( + parameters.case_dir + == "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051" + ) + assert parameters.experiment_name == "v3.LR.historical_0051" + assert parameters.figstr == "v3.LR.historical_0051" + assert parameters.year1 == 1985 + assert parameters.year2 == 1989 + assert parameters.color == "Blue" + assert parameters.ts_num_years_str == "5" + assert parameters.plots_original == [] + assert parameters.atmosphere_only == False # noqa: E712 + assert parameters.plots_atm == ["TREFHT"] + assert parameters.plots_ice == [] + assert parameters.plots_lnd == [ + "FSH", + "RH2M", + "LAISHA", + "LAISUN", + "QINTR", + "QOVER", + "QRUNOFF", + "QSOIL", + "QVEGE", + "QVEGT", + "SOILWATER_10CM", + "TSA", + "H2OSNO", + "TOTLITC", + "CWDC", + "SOIL1C", + "SOIL2C", + "SOIL3C", + "SOIL4C", + "WOOD_HARVESTC", + "TOTVEGC", + "NBP", + "GPP", + "AR", + "HR", + ] + assert parameters.plots_ocn == [] + assert parameters.nrows == 1 + assert parameters.ncols == 1 + 
assert parameters.regions == ["glb", "n", "s"] + + # test_get_data_dir + assert ( + get_data_dir(parameters, "atm", True) + == "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/atm/glb/ts/monthly/5yr/" + ) + assert get_data_dir(parameters, "atm", False) == "" + assert ( + get_data_dir(parameters, "ice", True) + == "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/ice/glb/ts/monthly/5yr/" + ) + assert get_data_dir(parameters, "ice", False) == "" + assert ( + get_data_dir(parameters, "lnd", True) + == "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/lnd/glb/ts/monthly/5yr/" + ) + assert get_data_dir(parameters, "lnd", False) == "" + assert ( + get_data_dir(parameters, "ocn", True) + == "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/ocn/glb/ts/monthly/5yr/" + ) + assert get_data_dir(parameters, "ocn", False) == "" + + # test_get_exps + exps: List[Dict[str, Any]] = get_exps(parameters) + assert len(exps) == 1 + expected = { + "atmos": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/atm/glb/ts/monthly/5yr/", + "ice": "", + "land": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/lnd/glb/ts/monthly/5yr/", + "ocean": "", + "moc": "", + "vol": "", + "name": "v3.LR.historical_0051", + "yoffset": 0.0, + "yr": ([1985, 1989],), + "color": "Blue", + } + assert exps[0] == expected + # Change up parameters + parameters.plots_original = "net_toa_flux_restom,global_surface_air_temperature,toa_radiation,net_atm_energy_imbalance,change_ohc,max_moc,change_sea_level,net_atm_water_imbalance".split( + "," + ) + 
parameters.plots_atm = [] + parameters.plots_lnd = [] + exps = get_exps(parameters) + assert len(exps) == 1 + expected = { + "atmos": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/atm/glb/ts/monthly/5yr/", + "ice": "", + "land": "", + "ocean": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/ocn/glb/ts/monthly/5yr/", + "moc": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/ocn/glb/ts/monthly/5yr/", + "vol": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/ocn/glb/ts/monthly/5yr/", + "name": "v3.LR.historical_0051", + "yoffset": 0.0, + "yr": ([1985, 1989],), + "color": "Blue", + } + assert exps[0] == expected + # Change up parameters + parameters.atmosphere_only = True + exps = get_exps(parameters) + assert len(exps) == 1 + expected = { + "atmos": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_single_plots_output/test-616-20240930/v3.LR.historical_0051/post/atm/glb/ts/monthly/5yr/", + "ice": "", + "land": "", + "ocean": "", + "moc": "", + "vol": "", + "name": "v3.LR.historical_0051", + "yoffset": 0.0, + "yr": ([1985, 1989],), + "color": "Blue", + } + assert exps[0] == expected + + +def test_Variable(): + v = Variable( + "var_name", + original_units="units1", + final_units="units2", + group="group_name", + long_name="long name", + ) + assert v.variable_name == "var_name" + assert v.metric == Metric.AVERAGE + assert v.scale_factor == 1.0 + assert v.original_units == "units1" + assert v.final_units == "units2" + assert v.group == "group_name" + assert v.long_name == "long name" + + +def test_get_vars_original(): + assert get_var_names(get_vars_original(["net_toa_flux_restom"])) == ["RESTOM"] + assert 
get_var_names(get_vars_original(["net_atm_energy_imbalance"])) == [ + "RESTOM", + "RESSURF", + ] + assert get_var_names(get_vars_original(["global_surface_air_temperature"])) == [ + "TREFHT" + ] + assert get_var_names(get_vars_original(["toa_radiation"])) == ["FSNTOA", "FLUT"] + assert get_var_names(get_vars_original(["net_atm_water_imbalance"])) == [ + "PRECC", + "PRECL", + "QFLX", + ] + assert get_var_names( + get_vars_original( + [ + "net_toa_flux_restom", + "net_atm_energy_imbalance", + "global_surface_air_temperature", + "toa_radiation", + "net_atm_water_imbalance", + ] + ) + ) == ["RESTOM", "RESSURF", "TREFHT", "FSNTOA", "FLUT", "PRECC", "PRECL", "QFLX"] + assert get_var_names(get_vars_original(["invalid_plot"])) == [] + + +def test_construct_generic_variables(): + vars: List[str] = ["a", "b", "c"] + assert get_var_names(construct_generic_variables(vars)) == vars + + +def test_get_ylim(): + # Min is equal, max is equal + assert get_ylim([-1, 1], [-1, 1]) == [-1, 1] + # Min is lower, max is equal + assert get_ylim([-1, 1], [-2, 1]) == [-2, 1] + # Min is equal, max is higher + assert get_ylim([-1, 1], [-1, 2]) == [-1, 2] + # Min is lower, max is higher + assert get_ylim([-1, 1], [-2, 2]) == [-2, 2] + # Min is lower, max is higher, multiple extreme_values + assert get_ylim([-1, 1], [-2, -0.5, 0.5, 2]) == [-2, 2] + # No standard range + assert get_ylim([], [-2, 2]) == [-2, 2] + # No extreme range + assert get_ylim([-1, 1], []) == [-1, 1] diff --git a/zppy_interfaces/__init__.py b/zppy_interfaces/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zppy_interfaces/global_time_series/__init__.py b/zppy_interfaces/global_time_series/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zppy_interfaces/global_time_series/__main__.py b/zppy_interfaces/global_time_series/__main__.py new file mode 100644 index 0000000..337bac7 --- /dev/null +++ b/zppy_interfaces/global_time_series/__main__.py @@ -0,0 +1,115 @@ +import argparse +import os 
+import shutil +import sys + +from zppy_interfaces.global_time_series.coupled_global import coupled_global +from zppy_interfaces.global_time_series.ocean_month import ocean_month +from zppy_interfaces.global_time_series.utils import Parameters +from zppy_interfaces.multi_utils.logger import _setup_custom_logger + +logger = _setup_custom_logger(__name__) + + +def main(parameters=None): + if not parameters: + parameters = _get_args() + + if parameters.use_ocn: + logger.info("Create ocean time series") + # NOTE: MODIFIES THE CASE DIRECTORY (parameters.case_dir) post subdirectory + os.makedirs( + f"{parameters.case_dir}/post/ocn/glb/ts/monthly/{parameters.ts_num_years_str}yr", + exist_ok=True, + ) + input: str = f"{parameters.input}/{parameters.input_subdir}" + # NOTE: MODIFIES THE CASE DIRECTORY (parameters.case_dir) post subdirectory + ocean_month( + input, + parameters.case_dir, + parameters.year1, + parameters.year2, + int(parameters.ts_num_years_str), + ) + + logger.info("Copy moc file") + # NOTE: MODIFIES THE CASE DIRECTORY (parameters.case_dir) post subdirectory + shutil.copy( + f"{parameters.case_dir}/post/analysis/mpas_analysis/cache/timeseries/moc/{parameters.moc_file}", + f"{parameters.case_dir}/post/ocn/glb/ts/monthly/{parameters.ts_num_years_str}yr/", + ) + + logger.info("Update time series figures") + # NOTE: PRODUCES OUTPUT IN THE CURRENT DIRECTORY + coupled_global(parameters) + + +def _get_args() -> Parameters: + # Parser + parser: argparse.ArgumentParser = argparse.ArgumentParser( + usage="zi-global-time-series ", + description="Generate Global Time Series plots", + ) + + # For ocean_month + parser.add_argument("--use_ocn", type=str, help="Use ocean") + parser.add_argument("--input", type=str, help="Input directory") + parser.add_argument("--input_subdir", type=str, help="Input subdirectory") + parser.add_argument("--moc_file", type=str, help="MOC file") + + # For coupled_global + parser.add_argument("--case_dir", type=str, help="Case directory") + 
parser.add_argument("--experiment_name", type=str, help="Experiment name") + parser.add_argument("--figstr", type=str, help="Figure string") + parser.add_argument("--color", type=str, help="Color") + parser.add_argument("--ts_num_years", type=str, help="Time series number of years") + parser.add_argument("--plots_original", type=str, help="Plots original") + parser.add_argument("--atmosphere_only", type=str, help="Atmosphere only") + parser.add_argument("--plots_atm", type=str, help="Plots atmosphere") + parser.add_argument("--plots_ice", type=str, help="Plots ice") + parser.add_argument("--plots_lnd", type=str, help="Plots land") + parser.add_argument("--plots_ocn", type=str, help="Plots ocean") + parser.add_argument("--nrows", type=str, help="Number of rows in pdf") + parser.add_argument("--ncols", type=str, help="Number of columns in pdf") + parser.add_argument("--results_dir", type=str, help="Results directory") + parser.add_argument("--regions", type=str, help="Regions") + + # For both + parser.add_argument("--start_yr", type=str, help="Start year") + parser.add_argument("--end_yr", type=str, help="End year") + + # Ignore the first arg + # (zi-global-time-series) + args: argparse.Namespace = parser.parse_args(sys.argv[1:]) + + return Parameters(vars(args)) + + +# Run with `python __main__.py` +if __name__ == "__main__": + parameters: Parameters = Parameters( + { + "use_ocn": "True", + "input": "/lcrc/group/e3sm2/ac.wlin/E3SMv3/v3.LR.historical_0051", + "input_subdir": "archive/ocn/hist", + "moc_file": "mocTimeSeries_1985-1995.nc", + "case_dir": "/lcrc/group/e3sm/ac.forsyth2/zppy_min_case_global_time_series_original_8_output/test-642-working-env-20241121/v3.LR.historical_0051", + "experiment_name": "v3.LR.historical_0051", + "figstr": "v3.LR.historical_0051", + "color": "Blue", + "ts_num_years": "5", + "plots_original": 
"net_toa_flux_restom,global_surface_air_temperature,toa_radiation,net_atm_energy_imbalance,change_ohc,max_moc,change_sea_level,net_atm_water_imbalance", + "atmosphere_only": "False", + "plots_atm": "None", + "plots_ice": "None", + "plots_lnd": "None", + "plots_ocn": "None", + "nrows": "4", + "ncols": "2", + "results_dir": "global_time_series_1985-1995_results", + "regions": "glb,n,s", + "start_yr": "1985", + "end_yr": "1995", + } + ) + main(parameters) diff --git a/zppy_interfaces/global_time_series/coupled_global.py b/zppy_interfaces/global_time_series/coupled_global.py new file mode 100644 index 0000000..67ed921 --- /dev/null +++ b/zppy_interfaces/global_time_series/coupled_global.py @@ -0,0 +1,1037 @@ +# Script to plot some global atmosphere and ocean time series +import glob +import math +import os +import traceback +from enum import Enum +from typing import Any, Dict, List, Tuple + +import cftime +import matplotlib as mpl +import matplotlib.backends.backend_pdf +import matplotlib.pyplot as plt +import numpy as np +import xarray +import xcdat +from netCDF4 import Dataset + +from zppy_interfaces.global_time_series.utils import Parameters +from zppy_interfaces.multi_utils.logger import _setup_custom_logger + +logger = _setup_custom_logger(__name__) + + +mpl.use("Agg") + + +# Useful classes and their helper functions ################################### +class Metric(Enum): + AVERAGE = 1 + + +class Variable(object): + def __init__( + self, + variable_name, + metric=Metric.AVERAGE, + scale_factor=1.0, + original_units="", + final_units="", + group="", + long_name="", + ): + # The name of the EAM/ELM/etc. 
variable on the monthly h0 history file + self.variable_name: str = variable_name + + # These fields are used for computation + # Global average over land area or global total + self.metric: Metric = metric + # The factor that should convert from original_units to final_units, after standard processing with nco + self.scale_factor: float = scale_factor + # Test string for the units as given on the history file (included here for possible testing) + self.original_units: str = original_units + # The units that should be reported in time series plots, based on metric and scale_factor + self.final_units: str = final_units + + # These fields are used for plotting + # A name used to cluster variables together, to be separated in groups within the output web pages + self.group: str = group + # Descriptive text to add to the plot page to help users identify the variable + self.long_name: str = long_name + + +def get_vars_original(plots_original: List[str]) -> List[Variable]: + # NOTE: These are ALL atmosphere variables + vars_original: List[Variable] = [] + if ("net_toa_flux_restom" in plots_original) or ( + "net_atm_energy_imbalance" in plots_original + ): + vars_original.append(Variable("RESTOM")) + if "net_atm_energy_imbalance" in plots_original: + vars_original.append(Variable("RESSURF")) + if "global_surface_air_temperature" in plots_original: + vars_original.append(Variable("TREFHT")) + if "toa_radiation" in plots_original: + vars_original.append(Variable("FSNTOA")) + vars_original.append(Variable("FLUT")) + if "net_atm_water_imbalance" in plots_original: + vars_original.append(Variable("PRECC")) + vars_original.append(Variable("PRECL")) + vars_original.append(Variable("QFLX")) + return vars_original + + +def construct_generic_variables(requested_vars: List[str]) -> List[Variable]: + var_list: List[Variable] = [] + for var_name in requested_vars: + var_list.append(Variable(var_name)) + return var_list + + +class RequestedVariables(object): + def __init__(self, 
parameters: Parameters): + self.vars_original: List[Variable] = get_vars_original( + parameters.plots_original + ) + self.vars_atm: List[Variable] = construct_generic_variables( + parameters.plots_atm + ) + self.vars_land: List[Variable] = construct_generic_variables( + parameters.plots_lnd + ) + self.vars_ice: List[Variable] = construct_generic_variables( + parameters.plots_ice + ) + self.vars_ocn: List[Variable] = construct_generic_variables( + parameters.plots_ocn + ) + + +class TS(object): + def __init__(self, directory): + + self.directory: str = directory + + # `directory` will be of the form `{case_dir}/post//glb/ts/monthly/{ts_num_years_str}yr/` + self.f: xarray.core.dataset.Dataset = xcdat.open_mfdataset( + f"{directory}*.nc", center_times=True + ) + + def __del__(self): + + self.f.close() + + def globalAnnualHelper( + self, + var: str, + metric: Metric, + scale_factor: float, + original_units: str, + final_units: str, + ) -> Tuple[xarray.core.dataarray.DataArray, str]: + + data_array: xarray.core.dataarray.DataArray + units: str = "" + + # Constants, from AMWG diagnostics + Lv = 2.501e6 + Lf = 3.337e5 + + # Is this a derived variable? 
+ if var == "RESTOM": + FSNT, _ = self.globalAnnualHelper( + "FSNT", metric, scale_factor, original_units, final_units + ) + FLNT, _ = self.globalAnnualHelper( + "FLNT", metric, scale_factor, original_units, final_units + ) + data_array = FSNT - FLNT + elif var == "RESTOA": + logger.warning("NOT READY") + FSNTOA, _ = self.globalAnnualHelper( + "FSNTOA", metric, scale_factor, original_units, final_units + ) + FLUT, _ = self.globalAnnualHelper( + "FLUT", metric, scale_factor, original_units, final_units + ) + data_array = FSNTOA - FLUT + elif var == "LHFLX": + QFLX, _ = self.globalAnnualHelper( + "QFLX", metric, scale_factor, original_units, final_units + ) + PRECC, _ = self.globalAnnualHelper( + "PRECC", metric, scale_factor, original_units, final_units + ) + PRECL, _ = self.globalAnnualHelper( + "PRECL", metric, scale_factor, original_units, final_units + ) + PRECSC, _ = self.globalAnnualHelper( + "PRECSC", metric, scale_factor, original_units, final_units + ) + PRECSL, _ = self.globalAnnualHelper( + "PRECSL", metric, scale_factor, original_units, final_units + ) + data_array = (Lv + Lf) * QFLX - Lf * 1.0e3 * ( + PRECC + PRECL - PRECSC - PRECSL + ) + elif var == "RESSURF": + FSNS, _ = self.globalAnnualHelper( + "FSNS", metric, scale_factor, original_units, final_units + ) + FLNS, _ = self.globalAnnualHelper( + "FLNS", metric, scale_factor, original_units, final_units + ) + SHFLX, _ = self.globalAnnualHelper( + "SHFLX", metric, scale_factor, original_units, final_units + ) + LHFLX, _ = self.globalAnnualHelper( + "LHFLX", metric, scale_factor, original_units, final_units + ) + data_array = FSNS - FLNS - SHFLX - LHFLX + elif var == "PREC": + PRECC, _ = self.globalAnnualHelper( + "PRECC", metric, scale_factor, original_units, final_units + ) + PRECL, _ = self.globalAnnualHelper( + "PRECL", metric, scale_factor, original_units, final_units + ) + data_array = 1.0e3 * (PRECC + PRECL) + else: + # Non-derived variables + if metric == Metric.AVERAGE: + 
annual_average_dataset_for_var: xarray.core.dataset.Dataset = ( + self.f.temporal.group_average(var, "year") + ) + data_array = annual_average_dataset_for_var.data_vars[var] + else: + # This shouldn't be possible + raise ValueError(f"Invalid Enum option for metric={metric}") + units = data_array.units + # `units` will be "1" if it's a dimensionless quantity + if (units != "1") and (original_units != "") and original_units != units: + raise ValueError( + f"Units don't match up: Have {units} but expected {original_units}. This renders the supplied scale_factor ({scale_factor}) unusable." + ) + data_array *= scale_factor + units = final_units + return data_array, units + + def globalAnnual( + self, var: Variable + ) -> Tuple[xarray.core.dataarray.DataArray, str]: + return self.globalAnnualHelper( + var.variable_name, + var.metric, + var.scale_factor, + var.original_units, + var.final_units, + ) + + +# Setup ####################################################################### +def get_data_dir(parameters: Parameters, component: str, conditional: bool) -> str: + return ( + f"{parameters.case_dir}/post/{component}/glb/ts/monthly/{parameters.ts_num_years_str}yr/" + if conditional + else "" + ) + + +def get_exps(parameters: Parameters) -> List[Dict[str, Any]]: + # Experiments + use_atmos: bool = (parameters.plots_atm != []) or (parameters.plots_original != []) + # Use set intersection: check if any of these 3 plots were requested + set_intersection: set = set(["change_ohc", "max_moc", "change_sea_level"]) & set( + parameters.plots_original + ) + has_original_ocn_plots: bool = set_intersection != set() + use_ocn: bool = (parameters.plots_ocn != []) or ( + (not parameters.atmosphere_only) and has_original_ocn_plots + ) + ocean_dir = get_data_dir(parameters, "ocn", use_ocn) + exps: List[Dict[str, Any]] = [ + { + "atmos": get_data_dir(parameters, "atm", use_atmos), + "ice": get_data_dir(parameters, "ice", parameters.plots_ice != []), + "land": get_data_dir(parameters, 
"lnd", parameters.plots_lnd != []), + "ocean": ocean_dir, + "moc": ocean_dir, + "vol": ocean_dir, + "name": parameters.experiment_name, + "yoffset": 0.0, + "yr": ([parameters.year1, parameters.year2],), + "color": f"{parameters.color}", + } + ] + return exps + + +def set_var( + exp: Dict[str, Any], + exp_key: str, + var_list: List[Variable], + valid_vars: List[str], + invalid_vars: List[str], + rgn: str, +) -> None: + if exp[exp_key] != "": + try: + ts_object: TS = TS(exp[exp_key]) + except Exception as e: + logger.critical(e) + logger.critical( + f"TS object could not be created for {exp_key}={exp[exp_key]}" + ) + raise e + for var in var_list: + var_str: str = var.variable_name + try: + data_array: xarray.core.dataarray.DataArray + units: str + data_array, units = ts_object.globalAnnual(var) + valid_vars.append(str(var_str)) + except Exception as e: + logger.error(e) + logger.error(f"globalAnnual failed for {var_str}") + invalid_vars.append(str(var_str)) + continue + if data_array.sizes["rgn"] > 1: + # number of years x 3 regions = data_array.shape + # 3 regions = global, northern hemisphere, southern hemisphere + # We get here if we used the updated `ts` task + # (using `rgn_avg` rather than `glb_avg`). + if rgn == "glb": + n = 0 + elif rgn == "n": + n = 1 + elif rgn == "s": + n = 2 + else: + raise RuntimeError(f"Invalid rgn={rgn}") + data_array = data_array.isel(rgn=n) # Just use nth region + elif rgn != "glb": + # data_array only has one dimension -- glb. + # Therefore it is not possible to get n or s plots. + raise RuntimeError( + f"var={var_str} only has global data. 
Cannot process rgn={rgn}" + ) + exp["annual"][var_str] = (data_array, units) + if "year" not in exp["annual"]: + years: np.ndarray[cftime.DatetimeNoLeap] = data_array.coords[ + "time" + ].values + exp["annual"]["year"] = [x.year for x in years] + del ts_object + + +def process_data( + parameters: Parameters, requested_variables: RequestedVariables, rgn: str +) -> List[Dict[str, Any]]: + exps: List[Dict[str, Any]] = get_exps(parameters) + valid_vars: List[str] = [] + invalid_vars: List[str] = [] + exp: Dict[str, Any] + for exp in exps: + exp["annual"] = {} + + set_var( + exp, + "atmos", + requested_variables.vars_original, + valid_vars, + invalid_vars, + rgn, + ) + set_var( + exp, "atmos", requested_variables.vars_atm, valid_vars, invalid_vars, rgn + ) + set_var(exp, "ice", requested_variables.vars_ice, valid_vars, invalid_vars, rgn) + set_var( + exp, "land", requested_variables.vars_land, valid_vars, invalid_vars, rgn + ) + set_var( + exp, "ocean", requested_variables.vars_ocn, valid_vars, invalid_vars, rgn + ) + + # Optionally read ohc + if exp["ocean"] != "": + ts = TS(exp["ocean"]) + exp["annual"]["ohc"], _ = ts.globalAnnual(Variable("ohc")) + # anomalies with respect to first year + exp["annual"]["ohc"][:] = exp["annual"]["ohc"][:] - exp["annual"]["ohc"][0] + + if exp["vol"] != "": + ts = TS(exp["vol"]) + exp["annual"]["volume"], _ = ts.globalAnnual(Variable("volume")) + # annomalies with respect to first year + exp["annual"]["volume"][:] = ( + exp["annual"]["volume"][:] - exp["annual"]["volume"][0] + ) + + logger.info( + f"{rgn} region globalAnnual was computed successfully for these variables: {valid_vars}" + ) + logger.error( + f"{rgn} region globalAnnual could not be computed for these variables: {invalid_vars}" + ) + return exps + + +############################################################################### + + +# ---additional function to get moc time series +def getmoc(dir_in): + files = sorted(glob.glob(dir_in + "mocTimeSeries*.nc")) + nfiles = 
len(files) + logger.info(f"{dir_in} {nfiles} moc files in total") + var = np.array([]) + time = np.array([]) + for i in range(nfiles): + # Open input file + fin = Dataset(files[i], "r") + time0 = fin["year"][:] + var0 = fin["mocAtlantic26"][:] + for iyear in range(int(time0[0]), int(time0[-1]) + 1): + if i > 0 and iyear <= time[-1]: + logger.info( + f"the amoc value for year {iyear} has been included in the moc time series from another moc file {files[i - 1]} {time[-1]} Skipping..." + ) + else: + imon = np.where(time0 == iyear)[0] + if len(imon) == 12: + var = np.append(var, np.mean(var0[imon])) + time = np.append(time, iyear) + else: + logger.error(f"error in input file : {files[i]}") + + return time, var + + +# ----------------------------------------------------------------------------- +# Function to add horizontal line showing average value over a specified period +def add_line(year, var, year1, year2, ax, format="%4.2f", lw=1, color="b"): + + i1 = (np.abs(year - year1)).argmin() + i2 = (np.abs(year - year2)).argmin() + + tmp = np.average(var[i1 : i2 + 1]) + ax.plot((year[i1], year[i2]), (tmp, tmp), lw=lw, color=color, label="average") + ax.text(ax.get_xlim()[1] + 1, tmp, format % tmp, va="center", color=color) + + return + + +# ----------------------------------------------------------------------------- +# Function to add line showing linear trend over a specified period +def add_trend( + year, + var, + year1, + year2, + ax, + format="%4.2f", + lw=1, + color="b", + verbose=False, + ohc=False, + vol=False, +): + + i1 = (np.abs(year - year1)).argmin() + i2 = (np.abs(year - year2)).argmin() + x = year[i1 : i2 + 1] + y = var[i1 : i2 + 1] + + fit = np.polyfit(x, y, 1) + if verbose: + logger.info(fit) + fit_fn = np.poly1d(fit) + ax.plot(x, fit_fn(x), lw=lw, ls="--", c=color, label="trend") + if ohc: + # Earth radius 6371229. 
from MPAS-O output files + heat_uptake = fit[0] / (4.0 * math.pi * (6371229.0) ** 2 * 365.0 * 86400.0) + ax.text( + ax.get_xlim()[1] + 1, + fit_fn(x[-1]), + "%+4.2f W m$^{-2}$" % (heat_uptake), + color=color, + ) + if vol: + # Earth radius 6371229. from MPAS-O output files + # sea_lvl = fit[0] / ( 4.0*math.pi*(6371229.)**2*0.7) #for oceanic portion of the Earth surface + ax.text( + ax.get_xlim()[1] + 1, + fit_fn(x[-1]), + "%+5.4f mm yr$^{-1}$" % (fit[0]), + color=color, + ) + + return + + +# ----------------------------------------------------------------------------- +# Function to get ylim +def get_ylim(standard_range, extreme_values): + if len(extreme_values) > 0: + has_extreme_values = True + extreme_min = np.amin(extreme_values) + extreme_max = np.amax(extreme_values) + else: + has_extreme_values = False + extreme_min = None + extreme_max = None + if len(standard_range) == 2: + has_standard_range = True + standard_min = standard_range[0] + standard_max = standard_range[1] + else: + has_standard_range = False + standard_min = None + standard_max = None + if has_extreme_values and has_standard_range: + # Use at least the standard range, + # perhaps a wider window to include extremes + if standard_min <= extreme_min: + ylim_min = standard_min + else: + ylim_min = extreme_min + if standard_max >= extreme_max: + ylim_max = standard_max + else: + ylim_max = extreme_max + elif has_extreme_values and not has_standard_range: + ylim_min = extreme_min + ylim_max = extreme_max + elif has_standard_range and not has_extreme_values: + ylim_min = standard_min + ylim_max = standard_max + else: + raise ValueError("Not enough range information supplied") + return [ylim_min, ylim_max] + + +# ----------------------------------------------------------------------------- +# Plotting functions + + +# 1 +def plot_net_toa_flux_restom(ax, xlim, exps, rgn): + logger.info("Plot 1: plot_net_toa_flux_restom") + param_dict = { + "2nd_var": False, + "axhline_y": 0, + "check_exp_ocean": False, 
+ "check_exp_vol": False, + "check_exp_year": True, + "default_ylim": [-1.5, 1.5], + "do_add_line": True, + "do_add_trend": True, + "format": "%4.2f", + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": True, + "set_legend": True, + "shorten_year": False, + "title": "Net TOA flux (restom)", + "use_getmoc": False, + "var": lambda exp: np.array(exp["annual"]["RESTOM"][0]), + "verbose": False, + "vol": False, + "ylabel": "W m-2", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 2 +def plot_global_surface_air_temperature(ax, xlim, exps, rgn): + logger.info("Plot 2: plot_global_surface_air_temperature") + if rgn == "glb": + region_title = "Global" + elif rgn == "n": + region_title = "Northern Hemisphere" + elif rgn == "s": + region_title = "Southern Hemisphere" + else: + raise RuntimeError(f"Invalid rgn={rgn}") + param_dict = { + "2nd_var": False, + "axhline_y": None, + "check_exp_ocean": False, + "check_exp_vol": False, + "check_exp_year": True, + "default_ylim": [13, 15.5], + "do_add_line": True, + "do_add_trend": True, + "format": "%4.2f", + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": False, + "set_legend": True, + "shorten_year": False, + "title": f"{region_title} surface air temperature", + "use_getmoc": False, + "var": lambda exp: np.array(exp["annual"]["TREFHT"][0]) - 273.15, + "verbose": False, + "vol": False, + "ylabel": "degC", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 3 +def plot_toa_radiation(ax, xlim, exps, rgn): + logger.info("Plot 3: plot_toa_radiation") + param_dict = { + "2nd_var": True, + "axhline_y": None, + "check_exp_ocean": False, + "check_exp_vol": False, + "check_exp_year": False, + "default_ylim": [235, 245], + "do_add_line": False, + "do_add_trend": False, + "format": None, + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": False, + "set_legend": False, + "shorten_year": False, + "title": "TOA radiation: SW (solid), LW (dashed)", + "use_getmoc": False, + "var": lambda exp: 
np.array(exp["annual"]["FSNTOA"][0]), + "verbose": None, + "vol": None, + "ylabel": "W m-2", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 4 +def plot_net_atm_energy_imbalance(ax, xlim, exps, rgn): + logger.info("Plot 4: plot_net_atm_energy_imbalance") + param_dict = { + "2nd_var": False, + "axhline_y": None, + "check_exp_ocean": False, + "check_exp_vol": False, + "check_exp_year": True, + "default_ylim": [-0.3, 0.3], + "do_add_line": True, + "do_add_trend": False, + "format": "%4.2f", + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": False, + "set_legend": True, + "shorten_year": False, + "title": "Net atm energy imbalance (restom-ressurf)", + "use_getmoc": False, + "var": lambda exp: np.array(exp["annual"]["RESTOM"][0]) + - np.array(exp["annual"]["RESSURF"][0]), + "verbose": False, + "vol": False, + "ylabel": "W m-2", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 5 +def plot_change_ohc(ax, xlim, exps, rgn): + logger.info("Plot 5: plot_change_ohc") + param_dict = { + "2nd_var": False, + "axhline_y": 0, + "check_exp_ocean": True, + "check_exp_vol": False, + "check_exp_year": False, + "default_ylim": [-0.3e24, 0.9e24], + "do_add_line": False, + "do_add_trend": True, + "format": "%4.2f", + "glb_only": True, + "lw": 1.5, + "ohc": True, + "set_axhline": True, + "set_legend": True, + "shorten_year": True, + "title": "Change in ocean heat content", + "use_getmoc": False, + "var": lambda exp: np.array(exp["annual"]["ohc"]), + "verbose": False, + "vol": False, + "ylabel": "J", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 6 +def plot_max_moc(ax, xlim, exps, rgn): + logger.info("Plot 6: plot_max_moc") + param_dict = { + "2nd_var": False, + "axhline_y": 10, + "check_exp_ocean": False, + "check_exp_vol": False, + "check_exp_year": False, + "default_ylim": [4, 22], + "do_add_line": False, + "do_add_trend": True, + "format": "%4.2f", + "glb_only": True, + "lw": 1.5, + "ohc": False, + "set_axhline": True, + "set_legend": True, + "shorten_year": 
False, + "title": "Max MOC Atlantic streamfunction at 26.5N", + "use_getmoc": True, + "var": None, + "verbose": True, + "vol": None, + "ylabel": "Sv", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 7 +def plot_change_sea_level(ax, xlim, exps, rgn): + logger.info("Plot 7: plot_change_sea_level") + param_dict = { + "2nd_var": False, + "axhline_y": None, + "check_exp_ocean": False, + "check_exp_vol": True, + "check_exp_year": True, + "default_ylim": [4, 22], + "do_add_line": False, + "do_add_trend": True, + "format": "%5.3f", + "glb_only": True, + "lw": 1.5, + "ohc": False, + "set_axhline": False, + "set_legend": True, + "shorten_year": True, + "title": "Change in sea level", + "use_getmoc": False, + "var": lambda exp: ( + 1e3 + * np.array(exp["annual"]["volume"]) + / (4.0 * math.pi * (6371229.0) ** 2 * 0.7) + ), + "verbose": True, + "vol": True, + "ylabel": "mm", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# 8 +def plot_net_atm_water_imbalance(ax, xlim, exps, rgn): + logger.info("Plot 8: plot_net_atm_water_imbalance") + param_dict = { + "2nd_var": False, + "axhline_y": None, + "check_exp_ocean": False, + "check_exp_vol": False, + "check_exp_year": False, + "default_ylim": [-1, 1], + "do_add_line": True, + "do_add_trend": False, + "format": "%5.4f", + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": False, + "set_legend": True, + "shorten_year": False, + "title": "Net atm water imbalance (evap-prec)", + "use_getmoc": False, + "var": lambda exp: ( + 365 + * 86400 + * ( + np.array(exp["annual"]["QFLX"][0]) + - 1e3 + * ( + np.array(exp["annual"]["PRECC"][0]) + + np.array(exp["annual"]["PRECL"][0]) + ) + ) + ), + "verbose": False, + "vol": False, + "ylabel": "mm yr-1", + } + plot(ax, xlim, exps, param_dict, rgn) + + +# Generic plot function +def plot_generic(ax, xlim, exps, var_name, rgn): + logger.info(f"plot_generic for {var_name}") + param_dict = { + "2nd_var": False, + "axhline_y": 0, + "check_exp_ocean": False, + "check_exp_vol": False, + 
"check_exp_year": True, + "default_ylim": [], + "do_add_line": True, + "do_add_trend": True, + "format": "%4.2f", + "glb_only": False, + "lw": 1.0, + "ohc": False, + "set_axhline": False, + "set_legend": True, + "shorten_year": False, + "title": var_name, + "use_getmoc": False, + "var": lambda exp: np.array(exp["annual"][var_name][0]), + "verbose": False, + "vol": False, + "ylabel": lambda exp: np.array(exp["annual"][var_name][1]), + } + plot(ax, xlim, exps, param_dict, rgn) + + +# FIXME: C901 'plot' is too complex (19) +def plot(ax, xlim, exps, param_dict, rgn): # noqa: C901 + if param_dict["glb_only"] and (rgn != "glb"): + return + ax.set_xlim(xlim) + extreme_values = [] + for exp in exps: + # Relevant to "Plot 5: plot_change_ohc" + if param_dict["check_exp_ocean"] and (exp["ocean"] == ""): + continue + # Relevant to "Plot 7: plot_change_sea_level" + # This must be checked before plot 6, + # otherwise, `param_dict["var"]` will be run, + # but `exp["annual"]["volume"]` won't exist. + if param_dict["check_exp_vol"] and (exp["vol"] == ""): + continue + # Relevant to "Plot 6: plot_max_moc" + if param_dict["use_getmoc"]: + if exp["moc"]: + [year, var] = getmoc(exp["moc"]) + else: + continue + else: + year = np.array(exp["annual"]["year"]) + exp["yoffset"] + var = param_dict["var"](exp) + extreme_values.append(np.amax(var)) + extreme_values.append(np.amin(var)) + if param_dict["shorten_year"]: + year = year[: len(var)] + try: + ax.plot( + year, + var, + lw=param_dict["lw"], + marker=None, + c=exp["color"], + label=exp["name"], + ) + except Exception: + raise RuntimeError(f"{param_dict['title']} could not be plotted.") + if param_dict["2nd_var"]: + # Specifically for plot_toa_radiation + # TODO: if more plots require a 2nd variable, we can change `var` to be a list, + # but that will be a more significant refactoring. 
# NOTE(review): this chunk begins mid-way through a plotting helper (the FLUT
# special case, the add_line/add_trend loop, and the axis set-up). Its `def`
# line is above this chunk, so that tail is not reproduced here — confirm
# against the full file.


# Maps names in `plots_original` to their plot functions.
PLOT_DICT = {
    "net_toa_flux_restom": plot_net_toa_flux_restom,
    "global_surface_air_temperature": plot_global_surface_air_temperature,
    "toa_radiation": plot_toa_radiation,
    "net_atm_energy_imbalance": plot_net_atm_energy_imbalance,
    "change_ohc": plot_change_ohc,  # only glb
    "max_moc": plot_max_moc,  # only glb
    "change_sea_level": plot_change_sea_level,  # only glb
    "net_atm_water_imbalance": plot_net_atm_water_imbalance,
}

# Input variables each "original" plot needs; used only to build the error
# message when its plot function fails. Plots not listed here report [].
_PLOT_REQUIRED_VARS: Dict[str, List[str]] = {
    "net_toa_flux_restom": ["RESTOM"],
    "net_atm_energy_imbalance": ["RESTOM", "RESSURF"],
    "global_surface_air_temperature": ["TREFHT"],
    "toa_radiation": ["FSNTOA", "FLUT"],
    "net_atm_water_imbalance": ["PRECC", "PRECL", "QFLX"],
}


def _plot_one(ax, xlim, exps, rgn, component, plot_name, valid_plots, invalid_plots):
    """Draw a single panel for `plot_name` on `ax`.

    Appends `plot_name` to `valid_plots` on success or `invalid_plots` on
    failure; a failure is logged (with the required input variables, for
    "original" plots) but not re-raised, so the remaining panels still render.
    Raises KeyError for an "original" plot name not present in PLOT_DICT.
    """
    if component == "original":
        try:
            plot_function = PLOT_DICT[plot_name]
        except KeyError:
            raise KeyError(f"Invalid plot name: {plot_name}")
        try:
            plot_function(ax, xlim, exps, rgn)
            valid_plots.append(plot_name)
        except Exception:
            traceback.print_exc()
            required_vars = _PLOT_REQUIRED_VARS.get(plot_name, [])
            logger.error(
                f"Failed plot_function for {plot_name}. Check that {required_vars} are available."
            )
            invalid_plots.append(plot_name)
    else:
        try:
            plot_generic(ax, xlim, exps, plot_name, rgn)
            valid_plots.append(plot_name)
        except Exception:
            traceback.print_exc()
            logger.error(f"plot_generic failed. Invalid plot={plot_name}")
            invalid_plots.append(plot_name)


def make_plot_pdfs(
    parameters: Parameters,
    rgn,
    component,
    xlim,
    exps,
    plot_list,
    valid_plots,
    invalid_plots,
):
    """Render every plot in `plot_list` into one multi-page PDF plus PNG(s).

    Output files are written under `parameters.results_dir`, named from
    `parameters.figstr`, `rgn`, and `component`. `valid_plots` and
    `invalid_plots` are mutated in place so the caller can report results.
    Returns immediately when `plot_list` is empty.
    """
    num_plots = len(plot_list)
    if num_plots == 0:
        return
    plots_per_page = parameters.nrows * parameters.ncols
    num_pages = math.ceil(num_plots / plots_per_page)

    counter = 0
    os.makedirs(parameters.results_dir, exist_ok=True)
    # https://stackoverflow.com/questions/58738992/save-multiple-figures-with-subplots-into-a-pdf-with-multiple-pages
    pdf = matplotlib.backends.backend_pdf.PdfPages(
        f"{parameters.results_dir}/{parameters.figstr}_{rgn}_{component}.pdf"
    )
    # Most recently drawn plot; used to name the one-plot-per-page PNG below.
    plot_name = ""
    for page in range(num_pages):
        if plots_per_page == 1:
            fig = plt.figure(1, figsize=[13.5 / 2, 16.5 / 4])
        else:
            fig = plt.figure(1, figsize=[13.5, 16.5])
        fig.suptitle(f"{parameters.figstr}_{rgn}_{component}")
        for j in range(plots_per_page):
            # The final page doesn't need to be filled out with plots.
            if counter >= num_plots:
                break
            ax = plt.subplot(parameters.nrows, parameters.ncols, j + 1)
            # BUGFIX: bind plot_name unconditionally. Previously it was only
            # assigned on the failure path (or in the non-"original" branch),
            # so the single-plot-per-page PNG name below could raise NameError
            # or reuse a stale name when an "original" plot succeeded.
            plot_name = plot_list[counter]
            _plot_one(
                ax, xlim, exps, rgn, component, plot_name, valid_plots, invalid_plots
            )
            counter += 1

        fig.tight_layout()
        pdf.savefig(1)
        if plots_per_page == 1:
            fig.savefig(
                f"{parameters.results_dir}/{parameters.figstr}_{rgn}_{component}_{plot_name}.png",
                dpi=150,
            )
        elif num_pages > 1:
            fig.savefig(
                f"{parameters.results_dir}/{parameters.figstr}_{rgn}_{component}_{page}.png",
                dpi=150,
            )
        else:
            fig.savefig(
                f"{parameters.results_dir}/{parameters.figstr}_{rgn}_{component}.png",
                dpi=150,
            )
        plt.clf()
    pdf.close()


# Run coupled_global ##########################################################
def run(parameters: Parameters, requested_variables: RequestedVariables, rgn: str):
    """Produce all requested plots (every component) for one region."""
    # Experiments
    exps: List[Dict[str, Any]] = process_data(parameters, requested_variables, rgn)

    xlim: List[float] = [float(parameters.year1), float(parameters.year2)]

    valid_plots: List[str] = []
    invalid_plots: List[str] = []

    # Use list of tuples rather than a dict, to keep order
    mapping: List[Tuple[str, List[str]]] = [
        ("original", parameters.plots_original),
        ("atm", parameters.plots_atm),
        ("ice", parameters.plots_ice),
        ("lnd", parameters.plots_lnd),
        ("ocn", parameters.plots_ocn),
    ]
    for component, plot_list in mapping:
        make_plot_pdfs(
            parameters,
            rgn,
            component,
            xlim,
            exps,
            plot_list,
            valid_plots,
            invalid_plots,
        )
    logger.info(f"These {rgn} region plots generated successfully: {valid_plots}")
    # BUGFIX: grammar in the log message ("regions plots" -> "region plots").
    logger.error(
        f"These {rgn} region plots could not be generated successfully: {invalid_plots}"
    )


def coupled_global(parameters: Parameters) -> None:
    """Entry point: generate global-time-series plots for every requested region."""
    requested_variables = RequestedVariables(parameters)
    for rgn in parameters.regions:
        run(parameters, requested_variables, rgn)
# Compute time series of ocean heat content (ohc) using MPAS-O output

import glob
import os
from datetime import datetime

import numpy as np
from mpas_tools.cime.constants import constants
from netCDF4 import Dataset, chartostring, date2num

from zppy_interfaces.multi_utils.logger import _setup_custom_logger

logger = _setup_custom_logger(__name__)


def ocean_month(
    path_in: str, case_dir: str, start_yr: int, end_yr: int, ts_num_years: int
):
    """Compute monthly global ocean heat content (OHC) from MPAS-O
    `timeSeriesStatsMonthly` output and write one netCDF file per year set.

    Parameters
    ----------
    path_in : str
        Directory containing the MPAS-O monthly history files.
    case_dir : str
        Case directory; output is written under
        ``{case_dir}/post/ocn/glb/ts/monthly/{ts_num_years}yr``.
    start_yr, end_yr : int
        First and last year of the span to process.
    ts_num_years : int
        Number of years per output file (year set).
    """
    path_out = f"{case_dir}/post/ocn/glb/ts/monthly/{ts_num_years}yr"
    # ROBUSTNESS FIX: ensure the output directory exists before creating
    # the output Dataset files below (no-op if it already exists).
    os.makedirs(path_out, exist_ok=True)

    # Ocean constants
    cp = constants["SHR_CONST_CPSW"]  # specific heat [J/(kg*degC)]
    rho = constants["SHR_CONST_RHOSW"]  # [kg/m3]
    fac = rho * cp  # [J/(degC*m3)]

    # Time units, calendar
    tcalendar = "noleap"
    tunits = f"days since {start_yr:04d}-01-01 00:00:00"

    # Loop over year sets.
    # NOTE(review): the range end is exclusive, so this assumes the span
    # (end_yr - start_yr + 1) is a multiple of ts_num_years; otherwise the
    # last set's year2 exceeds end_yr — confirm against callers.
    for y in range(start_yr, end_yr, ts_num_years):

        year1 = y
        year2 = y + ts_num_years - 1
        files = []
        for year in range(year1, year2 + 1):
            logger.info(f"year={year}")
            inFiles = (
                f"{path_in}/*mpaso.hist.am.timeSeriesStatsMonthly.{year:04d}-??-??.nc"
            )
            files.extend(sorted(glob.glob(inFiles)))
        out = f"{path_out}/mpaso.glb.{year1:04d}01-{year2:04d}12.nc"

        # Create output file with an unlimited time dimension.
        fout = Dataset(out, "w", format="NETCDF4_CLASSIC")
        fout.createDimension("time", None)
        fout.createDimension("nbnd", 2)

        time = fout.createVariable("time", "f8", ("time",))
        time.long_name = "time"
        time.units = tunits
        time.calendar = tcalendar
        time.bounds = "time_bnds"

        time_bnds = fout.createVariable("time_bnds", "f8", ("time", "nbnd"))
        time_bnds.long_name = "time interval endpoints"

        ohc = fout.createVariable("ohc", "f8", ("time",))
        ohc.long_name = "total ocean heat content"
        ohc.units = "J"

        volume = fout.createVariable("volume", "f8", ("time",))
        volume.long_name = "sum of the volumeCell variable over the full domain, used to normalize global statistics"
        volume.units = "m^3"

        # OHC from monthly time series
        itime = 0
        for file in files:

            # Open current input file
            logger.info(f"mpaso file: {file}")
            f = Dataset(file, "r")

            # Time variables
            xtime_startMonthly = chartostring(f.variables["xtime_startMonthly"][:])
            xtime_endMonthly = chartostring(f.variables["xtime_endMonthly"][:])

            # Convert to datetime objects (assuming 0 UTC boundary)
            date_start = np.array(
                [datetime.strptime(x[0:10], "%Y-%m-%d") for x in xtime_startMonthly]
            )
            date_end = np.array(
                [datetime.strptime(x[0:10], "%Y-%m-%d") for x in xtime_endMonthly]
            )

            # Convert to netCDF4 time; use the midpoint as the record time.
            tstart = date2num(date_start, tunits, tcalendar)
            tend = date2num(date_end, tunits, tcalendar)
            t = 0.5 * (tstart + tend)

            # Variables needed to compute global OHC
            iregion = 6  # global average region
            sumLayerMaskValue = f.variables[
                "timeMonthly_avg_avgValueWithinOceanLayerRegion_sumLayerMaskValue"
            ][:, iregion, :]
            avgLayerArea = f.variables[
                "timeMonthly_avg_avgValueWithinOceanLayerRegion_avgLayerArea"
            ][:, iregion, :]
            avgLayerThickness = f.variables[
                "timeMonthly_avg_avgValueWithinOceanLayerRegion_avgLayerThickness"
            ][:, iregion, :]
            avgLayerTemperature = f.variables[
                "timeMonthly_avg_avgValueWithinOceanLayerRegion_avgLayerTemperature"
            ][:, iregion, :]

            # volumeCellGlobal
            volumeCell = f.variables["timeMonthly_avg_volumeCellGlobal"][:]

            # Close current input file
            f.close()

            # Compute OHC: sum over layers of (area * thickness * T),
            # scaled by rho*cp to yield Joules.
            layerArea = sumLayerMaskValue * avgLayerArea
            layerVolume = layerArea * avgLayerThickness
            tmp = layerVolume * avgLayerTemperature
            ohc_tot = fac * np.sum(tmp, axis=1)

            # Diagnostics printout
            for i in range(len(date_start)):
                logger.info(
                    f"Start, End, OHC = {date_start[i]} ({tstart[i]}), {date_end[i]} ({tend[i]}), {ohc_tot[i]:.2e}"
                )

            # Append this file's records at the current offset.
            time[itime:] = t
            time_bnds[itime:, 0] = tstart
            time_bnds[itime:, 1] = tend
            ohc[itime:] = ohc_tot
            volume[itime:] = volumeCell

            itime = itime + len(t)

        # Close output file
        fout.close()
from typing import Dict, List


# Parameters ##################################################################
class Parameters(object):
    """Typed container for the command-line arguments driving `ocean_month`
    and `coupled_global`. All values arrive as strings and are converted
    (bool/int/list/region normalization) here."""

    def __init__(self, args: Dict[str, str]):

        # For ocean_month
        self.use_ocn: bool = _str2bool(args["use_ocn"])
        self.input: str = args["input"]
        self.input_subdir: str = args["input_subdir"]
        self.moc_file: str = args["moc_file"]

        # For coupled_global
        self.case_dir: str = args["case_dir"]
        self.experiment_name: str = args["experiment_name"]
        self.figstr: str = args["figstr"]
        self.color: str = args["color"]
        self.ts_num_years_str: str = args["ts_num_years"]
        self.plots_original: List[str] = param_get_list(args["plots_original"])
        self.atmosphere_only: bool = _str2bool(args["atmosphere_only"])
        self.plots_atm: List[str] = param_get_list(args["plots_atm"])
        self.plots_ice: List[str] = param_get_list(args["plots_ice"])
        self.plots_lnd: List[str] = param_get_list(args["plots_lnd"])
        self.plots_ocn: List[str] = param_get_list(args["plots_ocn"])
        self.nrows: int = int(args["nrows"])
        self.ncols: int = int(args["ncols"])
        self.results_dir: str = args["results_dir"]
        # These regions are used often as strings,
        # so making an Enum Region={GLOBAL, NORTH, SOUTH} would be limiting.
        self.regions: List[str] = [
            get_region(rgn) for rgn in args["regions"].split(",")
        ]

        # For both
        self.year1: int = int(args["start_yr"])
        self.year2: int = int(args["end_yr"])


def _str2bool(s: str) -> bool:
    """Return True iff `s` equals "true" (case-insensitive); anything else is False."""
    return s.lower() == "true"


def param_get_list(param_value: str) -> List[str]:
    """Split a comma-separated parameter into a list; the literal "None" means empty."""
    if param_value == "None":
        return []
    return param_value.split(",")


def get_region(rgn: str) -> str:
    """Normalize a region name to one of "glb", "n", or "s".

    Raises
    ------
    ValueError
        If `rgn` is not a recognized region name.
    """
    rgn_lower = rgn.lower()
    if rgn_lower in ["glb", "global"]:
        return "glb"
    if rgn_lower in ["n", "north", "northern"]:
        return "n"
    if rgn_lower in ["s", "south", "southern"]:
        return "s"
    raise ValueError(f"Invalid rgn={rgn}")


###############################################################################


# Logger module for setting up a logger. Based on xcdat/xcdat/_logger.py

import logging
import logging.handlers

# Logging module setup
log_format = (
    "%(asctime)s [%(levelname)s]: %(filename)s(%(funcName)s:%(lineno)s) >> %(message)s"
)
logging.basicConfig(format=log_format, filemode="w", level=logging.INFO)

# Console handler setup
# NOTE(review): basicConfig above already attaches a StreamHandler to the root
# logger, so adding this second console handler makes every record print
# twice — confirm whether the duplicate console output is intended.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
logFormatter = logging.Formatter(log_format)
console_handler.setFormatter(logFormatter)
logging.getLogger().addHandler(console_handler)


def _setup_custom_logger(name, propagate=True) -> logging.Logger:
    """Sets up a custom logger.

    Documentation on logging: https://docs.python.org/3/library/logging.html

    Parameters
    ----------
    name : str
        Name of the file where this function is called.
    propagate : bool, optional
        Whether to propagate logger messages or not, by default True
        (BUGFIX: the docstring previously said False, contradicting the
        signature default).

    Returns
    -------
    logging.Logger
        The logger.

    Examples
    --------
    Available levels: https://docs.python.org/3/library/logging.html#levels

    >>> logger.debug("")     # diagnosing problems
    >>> logger.info("")      # things working as expected
    >>> logger.warning("")   # something unexpected happened
    >>> logger.error("")     # a function could not be performed
    >>> logger.critical("")  # the program may be unable to continue
    """
    logger = logging.getLogger(name)
    logger.propagate = propagate

    return logger


__version__ = "0.0.1"