From 7b82c00c825173f006d099f60782b5d34ee6436d Mon Sep 17 00:00:00 2001
From: Jacob Mims <122570226+jtmims@users.noreply.github.com>
Date: Thu, 19 Dec 2024 09:59:22 -0600
Subject: [PATCH] Sync container branch with main (#724)

* Container (#678)
* Create Dockerfile works with synthetic example_multicase POD
* Update Dockerfile
* Update Dockerfile
* Update Dockerfile
* Create docker-build-and-push.yml
* Update docker-build-and-push.yml
* Update docker-build-and-push.yml
* Update docker-build-and-push.yml
* Update docker-build-and-push.yml
* Container Documentation (#687)
* Create container_config_demo.jsonc
* Create container_cat.csv
* Create container_cat.json
* Update container_config_demo.jsonc
* docs
* Update ref_container.rst
* Update ref_container.rst
* Update ref_container.rst
* Update ref_container.rst
* Update ref_container.rst
* Update dev_start.rst
* Update ref_container.rst
* Update dev_start.rst
* Update ref_container.rst
* Update doc/sphinx/dev_start.rst Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
* Update doc/sphinx/ref_container.rst Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
* Update doc/sphinx/ref_container.rst Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
* Update doc/sphinx/ref_container.rst Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
* Update doc/sphinx/dev_start.rst Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
---------
Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
* Fix ci bugs (#688)
* fix unresolved conda_root ref in pod_setup; comment out no_translation setting for matching POD and runtime conventions for testing
* fix coord_name def in translate_coord
* define var_id separately in pp query
* change new_coord definition to obtain ordered dict instead of generator object in translation.create_scalar_name so that deepcopy can pickle it
* change logic in pod_setup to set translation object to no_translation only if translate_data is false in runtime config file
* uncomment more set1 pods that pass initial testing in house
* add checks for no_translation data source and assign query atts using the var object instead of the var.translation object if True to preprocessor
* remove old comment from preprocessor
* change value for hourly data search in datelabel get_timedelta_kwargs to return 1hr instead of hr so that the frequency for hourly data matches the required catalog specification
* comment out some set1 tests, since they are timing out on CI
* rename github actions test config files; split group 1 CI tests into 2 runs to avoid timeout issues
* update mdtf_tests.yml to reference new config file names and clean up deprecated calls
* update mdtf_tests.yml
* update matrix refs in mdtf_tests.yml
* revert changes to datelabel and move hr --> 1hr freq conversion to preprocessor
* delete old test files; just run 1 POD in set1 tests; try adding timeouts in mdtf_tests.yml
* fix typo in timeout call in mdtf_tests
* fix GFDL entries in test catalogs
* fix varid entries for wvp in test catalogs
* change atmosphere_mass_content_of_water_vapor id from prw to wvp in gfdl field table
* comment out long_name check in translation.py
* define src_unit for coords if available in preprocessor.ConvertUnitsFunction; redefine dest_unit using var.units.units so that the param is a string instead of a Units.units object in the call to units.convert_dataarray
* log warning instead of raising error if attr name doesn't match in xr_parser.compare_attr so that values can be converted later
* fix variable refs in xarray datasets in units.convert_dataarray; add check to convert mb to hPa to convert_dataarray
* fix frequency entries for static vars in test catalogs
* remove duplicate realm entries from stc_eddy_heat_fluxes settings file
* remove non-alphanumeric chars from atts in xr_parser check_metadata
* comment out non-working PODs in set 3 tests
* Remove timeout lines and comment unused test tarballs in mdtf_tests.yml
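A minimal sketch of the hr -> 1hr frequency normalization described in the #688 commits above; the helper name and alias table are illustrative assumptions, not the framework's actual datelabel/preprocessor API:

```python
import logging

_log = logging.getLogger(__name__)

def normalize_frequency(freq: str) -> str:
    """Map shorthand frequency labels onto the spelling the ESM intake
    catalog expects, e.g. 'hr' -> '1hr' for hourly data."""
    aliases = {"hr": "1hr"}  # assumed alias table, extend as needed
    if freq in aliases:
        # warn instead of raising so the catalog query can still proceed
        _log.warning("rewriting frequency %r as %r to match catalog spec",
                     freq, aliases[freq])
        return aliases[freq]
    return freq

assert normalize_frequency("hr") == "1hr"
assert normalize_frequency("day") == "day"
```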
* infer 'start_time' and 'end_time' from 'time_range' due to type issues (#691)
* infer 'start_time' and 'end_time' from 'time_range' due to type issues
* add warning
* fix ci issue
* move line setting date_range in query_catalog() (#693)
* move line setting date_range in query_catalog()
* cleanup print
* Remove modifier entry from areacello in trop_pac_sea_lev POD settings file
* Fix issues in pp query (#692)
* fix hr -> 1hr freq conversion in pp query; try using regex string contains standard_name in query
* add check for parameter type to xr_parser approximate_attribute_value
* remove regex from pp query standard_name
* add check that bounds is populated in cf accessor, then check coord attrs and only run coord bounds check if bounds are not None in xr_parser
* add escape brackets to command-line commands (#694)
* Fix convective_transition_diag POD (#695)
* fix ctd file formatting and typos
* more formatting and typo fixes in ctd POD
* uncomment convective transition diag POD in 1a CI test config files
* try moving convective_transition_pod to ubuntu suite 2 tests
* add wkdir cleanup between each test run step and separate obs data fetching for set 1 tests in ci config file
* move convective_transition_diag POD to set 1b tests
* just run 1 POD in set 1a and 2 PODs in set 1b to avoid runner timeouts
* reorganize 1b tests
* add ua200-850 and va200-850 to gfdl-cmor-tables (#696)
* add ice/ocean precip entries to GFDL fieldlist (#697)
* Add alternate standard names entry to fieldlists and varlistEntry objects (#699)
* add alternate_standard_names entries to precipitation_flux vars in CMIP and GFDL fieldlists; add list of applicable realms to precipitation_flux
* add alternate_standard_names attributes and property setters to DMDependentVariable class that is the VarlistEntry parent class; define realm param as string or list
* extend realm search in fieldlist lookup tables to use a realm list in the translation; add list to realm type hints in translation module
* extend standard_name query to a list that includes alternate_standard_names if present in the translation object
* break up rainfall_flux and precipitation_flux entries in CMIP and GFDL field tables since translator can't parse realm list correctly
* revert realm type hints defined as string or list and casting realm strings to lists in translation module
* change assertion to log error if translation is None in varlist_util
* define new standard_name for pp xarray vars using the translation standard_name if the query standard name is a list with alternates instead of a string
* add function check_multichunk to fix issue with chunk_freqs (#701)
* add function check_multichunk to fix issue with chunk_freqs
* fix function comment grammar
* grammar
* grammar
* move log warning
* add plots link to pod_error_snippet.html (#705)
* add plots link to pod_error_snippet.html
* remove empty line
* add variable table tool and put output into docs (#706)
* add variable table script to docs
* move file
* Delete tools/get_POD_varname/MDTF_Variable_Lists.html
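A minimal sketch of the alternate-standard-names matching added in #699; the entry below is abridged from the fieldlist_CMIP.jsonc hunk in this patch, while the helper function is an illustrative assumption rather than the framework's actual translation API:

```python
# Abridged from the precipitation_flux entry added to data/fieldlist_CMIP.jsonc
fieldlist_entry = {
    "standard_name": "precipitation_flux",
    "realm": "atmos",
    "units": "kg m-2 s-1",
    "alternate_standard_names": ["rainfall_flux"],
}

def matches_standard_name(entry: dict, query: str) -> bool:
    """Match a queried standard_name against the primary name or any alternate."""
    names = [entry["standard_name"]] + entry.get("alternate_standard_names", [])
    return query in names

# Both spellings of the precipitation variable now resolve to the same entry:
assert matches_standard_name(fieldlist_entry, "precipitation_flux")
assert matches_standard_name(fieldlist_entry, "rainfall_flux")
```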
* rework ref_vartable.rst to link directly to html file of the table (#707)
* rework ref_vartable.rst to link directly to html file of the table
* Delete doc/sphinx/MDTF_Variable_Lists.html
* Update MDTF_Variable_Lists.html
* remove example_pp_script.py from user_pp_scripts list in multirun_config_template.jsonc
* remove .nc files found in OUTPUT_DIR depending on config file (#710)
* fix formatting issues in output reference documentation (#711)
* fix forcing_feedback settings.jsonc formatting and remove extra freq entries
* Add check for user_pp_scripts attribute in config object to DaskMultifilePP init method
* add check for user_pp_scripts attr to execute_pp_functions
* update 'standard_name' for each var in write_pp_catalog (#713)
* Update docs about --env_dir flag (#715)
* Update README.md
* Update start_install.rst
* fix logic when defining log messages in pod_setup
* Fix dummy translation method in NoTranslationFieldlist (#717)
* define missing entries in dummy translation object returned by NoTranslationFieldlist.translate; add logic to determine alternate_standard_names attribute to NoTranslationFieldlist.translate
* set translate_data to false for testing
* edit logging message for no translation setting in pod_setup
* add todo to translation translate_coord and clean up comments
* remove checks for no_translation from preprocessor
* define TranslatedVarlistEntry name attribute using data convention field table variable id
* revert debugging changes from test config file
* update docs for translate_data flag in the runtime config file
* fix variable_id and var_id refs in dummy translate method
* Reimplement crop date range capability (#718)
* add placeholder functions for date range cropping
* refine crop_date_range function. Need to figure out how to pass calendar from subset df
* continue reworking crop_date_range
* revert changes to check_group_daterange, and add check that input files overlap start and end times; add option aggregate=false to to_dataset_dict call; look into replacing check_time_bounds with crop_date_range call before the xarray merge
* reorder crop_date_range call; add calls to parse xr time coord and define start and end times for dataset
* finalize logic in crop_date_range
* remove start_time and end_time from, and add time_range column to, catalog generated by define_pp_catalog_assets
* replace start_time and end_time entries with time_range entries populated from information in processed xarray dataset in write_pp_catalog
* remove unused dask import from preprocessor
* replace hard-coded time dimension name with var.T.name in call to xarray concatenate
* add check_time_bounds call back to query and fix definitions for modified start and end points so that they use the dataset information
* fix hour, min, sec defs in crop_date_range for new start and end times
* strip non-numeric chars from strings passed to _coerce_to_datetime
* add logic to define start and end points for situation where desired date range is contained by xarray dataset to crop_date_range
* Create drop attributes func (#720)
* fix forcing_feedback settings formatting
* add check for user_pp_scripts attribute before looping through list to multifilepreprocessor add_user_pp_scripts method
* add snakeviz to env_dev.yml
* move drop_atts loop to a separate function that is called by crop_date_range and before merging datasets in query_catalog in the preprocessor
* Update mdtf dev env file (#722)
* add snakeviz, gprof2dot, and intake-esgf packages to env_dev file
* add viztracer to dev environment file
* add kerchunk package to dev environment
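A minimal sketch of the date-range cropping reimplemented in #718, using xarray's label-based slicing, which clamps to the data's own extent and so covers the case the commits above handle by falling back to the dataset's first/last time points; the signature is an illustrative assumption, not the framework's actual crop_date_range:

```python
import numpy as np
import pandas as pd
import xarray as xr

def crop_date_range(ds: xr.Dataset, start: str, end: str,
                    time_name: str = "time") -> xr.Dataset:
    """Trim a dataset to the analysis period; slice() clamps to the data's
    extent when the requested range is wider than the files provide."""
    return ds.sel({time_name: slice(start, end)})

# Usage on a small synthetic dataset spanning 1975-1981:
times = pd.date_range("1975-01-01", "1981-12-31", freq="D")
ds = xr.Dataset({"tas": ("time", np.random.rand(times.size))},
                coords={"time": times})
cropped = crop_date_range(ds, "1976-01-01", "1980-12-31")
assert str(cropped.time.values[0]).startswith("1976")
assert str(cropped.time.values[-1]).startswith("1980-12-31")
```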
* Fix various pp issues related to running seaice_suite (#721)
* fix pp issues for seaice_suite
* fix arg issue
* rename functions
* add default return for conversion function
---------
Co-authored-by: Aparna Radhakrishnan
Co-authored-by: Jess <20195932+wrongkindofdoctor@users.noreply.github.com>
---
 .github/workflows/mdtf_tests.yml | 110 +- README.md | 4 +- data/fieldlist_CMIP.jsonc | 8 + data/fieldlist_GFDL.jsonc | 18 +- data/gfdl-cmor-tables/gfdl_to_cmip5_vars.csv | 4 + .../convecTransBasic.py | 13 +- .../convecTransBasic_usp_plot.py | 4 +- .../convecTransBasic_util.py | 1313 ++++---- .../convecTransCriticalCollapse.py | 41 +- .../convecTransCriticalCollapse_usp.py | 408 ++- .../convecTransCriticalCollapse_util.py | 946 +++--- .../convective_transition_diag.html | 2 +- .../convective_transition_diag_v2.py | 18 +- .../example_multicase/container_cat.csv | 3 + .../example_multicase/container_cat.json | 181 ++ .../container_config_demo.jsonc | 117 + .../multirun_config_template.jsonc | 2 +- diagnostics/forcing_feedback/settings.jsonc | 38 +- .../seaice_suite_sic_mean_sigma.py | 2 +- .../stc_eddy_heat_fluxes/settings.jsonc | 3 - .../tropical_pacific_sea_level/settings.jsonc | 1 - doc/_static/MDTF_Variable_Lists.html | 2694 +++++++++++++++++ doc/conf.py | 2 +- doc/sphinx/dev_start.rst | 4 + doc/sphinx/ref_container.rst | 89 + doc/sphinx/ref_output.rst | 5 +- doc/sphinx/ref_toc.rst | 2 + doc/sphinx/ref_vartable.rst | 5 + doc/sphinx/start_config.rst | 6 +- doc/sphinx/start_install.rst | 3 - src/conda/env_dev.yml | 30 +- src/data_model.py | 17 +- src/data_sources.py | 47 +- src/environment_manager.py | 2 +- src/html/pod_error_snippet.html | 5 +- src/output_manager.py | 19 + src/pod_setup.py | 17 +- src/preprocessor.py | 378 ++- src/translation.py | 64 +- src/units.py | 21 +- src/util/catalog.py | 2 +- src/util/datelabel.py | 16 +- src/varlist_util.py | 3 +- src/xr_parser.py | 83 +- tests/esm_catalog_test_macos.csv | 18 +- tests/esm_catalog_test_ubuntu.csv | 18 +- ...onc => github_actions_test_macos_1a.jsonc} | 7 +- tests/github_actions_test_macos_1b.jsonc | 81 + ...sonc => github_actions_test_macos_2.jsonc} | 0 ...sonc => github_actions_test_macos_3.jsonc} | 8 +- ...nc => github_actions_test_ubuntu_1a.jsonc} | 11 +- tests/github_actions_test_ubuntu_1b.jsonc | 80 + ...onc => github_actions_test_ubuntu_2.jsonc} | 0 ...onc => github_actions_test_ubuntu_3.jsonc} | 8 +- tools/get_POD_varname/get_POD_varname.py | 167 + 55 files changed, 5446 insertions(+), 1702 deletions(-) create mode 100644 diagnostics/example_multicase/container_cat.csv create mode 100644 diagnostics/example_multicase/container_cat.json create mode 100644 diagnostics/example_multicase/container_config_demo.jsonc create mode 100644 doc/_static/MDTF_Variable_Lists.html create mode 100644 doc/sphinx/ref_container.rst create mode 100644 doc/sphinx/ref_vartable.rst rename tests/{github_actions_test_macos_set1.jsonc => github_actions_test_macos_1a.jsonc} (96%) create mode 100644 tests/github_actions_test_macos_1b.jsonc rename tests/{github_actions_test_macos_set2.jsonc => github_actions_test_macos_2.jsonc} (100%) rename tests/{github_actions_test_macos_set3.jsonc => github_actions_test_macos_3.jsonc} (97%) rename tests/{github_actions_test_ubuntu_set1.jsonc => github_actions_test_ubuntu_1a.jsonc} (94%) create mode 100644 tests/github_actions_test_ubuntu_1b.jsonc rename tests/{github_actions_test_ubuntu_set2.jsonc => github_actions_test_ubuntu_2.jsonc} (100%) rename
tests/{github_actions_test_ubuntu_set3.jsonc => github_actions_test_ubuntu_3.jsonc} (97%) create mode 100644 tools/get_POD_varname/get_POD_varname.py diff --git a/.github/workflows/mdtf_tests.yml b/.github/workflows/mdtf_tests.yml index 80699f751..b6e00913d 100644 --- a/.github/workflows/mdtf_tests.yml +++ b/.github/workflows/mdtf_tests.yml @@ -19,30 +19,29 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-13] - json-file: ["tests/github_actions_test_ubuntu_set1.jsonc","tests/github_actions_test_macos_set1.jsonc"] - json-file-set2: ["tests/github_actions_test_ubuntu_set2.jsonc", "tests/github_actions_test_macos_set2.jsonc"] - json-file-set3: ["tests/github_actions_test_ubuntu_set3.jsonc", "tests/github_actions_test_macos_set3.jsonc"] + json-file-1a: ["tests/github_actions_test_ubuntu_1a.jsonc","tests/github_actions_test_macos_1a.jsonc"] + json-file-1b: ["tests/github_actions_test_ubuntu_1b.jsonc","tests/github_actions_test_macos_1b.jsonc"] + json-file-2: ["tests/github_actions_test_ubuntu_2.jsonc", "tests/github_actions_test_macos_2.jsonc"] + json-file-3: ["tests/github_actions_test_ubuntu_3.jsonc", "tests/github_actions_test_macos_3.jsonc"] # if experimental is true, other jobs to run if one fails experimental: [false] exclude: - os: ubuntu-latest - json-file: "tests/github_actions_test_macos_set1.jsonc" + json-file-1a: "tests/github_actions_test_macos_1a.jsonc" - os: ubuntu-latest - json-file-set2: "tests/github_actions_test_macos_set2.jsonc" + json-file-1b: "tests/github_actions_test_macos_1b.jsonc" - os: ubuntu-latest - json-file-set3: "tests/github_actions_test_macos_set3.jsonc" - - os: macos-12 - json-file: "tests/github_actions_test_ubuntu_set1.jsonc" - - os: macos-12 - json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc" - - os: macos-12 - json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc" + json-file-2: "tests/github_actions_test_macos_2.jsonc" + - os: ubuntu-latest + json-file-3: "tests/github_actions_test_macos_3.jsonc" + - os: macos-13 + json-file-1a: "tests/github_actions_test_ubuntu_1a.jsonc" - os: macos-13 - json-file: "tests/github_actions_test_ubuntu_set1.jsonc" + json-file-1b: "tests/github_actions_test_ubuntu_1b.jsonc" - os: macos-13 - json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc" + json-file-2: "tests/github_actions_test_ubuntu_2.jsonc" - os: macos-13 - json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc" + json-file-3: "tests/github_actions_test_ubuntu_3.jsonc" max-parallel: 3 steps: - uses: actions/checkout@v3 @@ -62,19 +61,13 @@ jobs: condarc: | channels: - conda-forge - - - name: Install XQuartz if macOS - if: ${{ matrix.os == 'macos-12' || matrix.os == 'macos-13'}} + - name: Set conda environment variables for macOS + if: ${{ matrix.os == 'macos-13' }} run: | - echo "Installing XQuartz" - brew install --cask xquartz echo "CONDA_ROOT=$(echo /Users/runner/micromamba)" >> $GITHUB_ENV echo "MICROMAMBA_EXE=$(echo /Users/runner/micromamba-bin/micromamba)" >> $GITHUB_ENV echo "CONDA_ENV_DIR=$(echo /Users/runner/micromamba/envs)" >> $GITHUB_ENV - - name: Set environment variables - run: | - echo "POD_OUTPUT=$(echo $PWD/../wkdir)" >> $GITHUB_ENV - - name: Set conda vars + - name: Set conda environment variables for ubuntu if: ${{ matrix.os == 'ubuntu-latest' }} run: | echo "MICROMAMBA_EXE=$(echo /home/runner/micromamba-bin/micromamba)" >> $GITHUB_ENV @@ -84,7 +77,7 @@ jobs: run: | echo "Installing Conda Environments" echo "conda root ${CONDA_ROOT}" - echo "env dir ${CONDA_ENV_DIR}" + echo "env dir ${CONDA_ENV_DIR}" # 
MDTF-specific setup: install all conda envs ./src/conda/micromamba_env_setup.sh --all --micromamba_root ${CONDA_ROOT} --micromamba_exe ${MICROMAMBA_EXE} --env_dir ${CONDA_ENV_DIR} echo "Creating the _MDTF_synthetic_data environment" @@ -104,7 +97,7 @@ jobs: mkdir wkdir ## make input data directories mkdir -p inputdata/obs_data - - name: Get Observational Data for Set 1 + - name: Get Observational Data for Set 1a run: | echo "${PWD}" cd ../ @@ -113,39 +106,56 @@ jobs: # attempt FTP data fetch # allow 20 min for transfer before timeout; Github actions allows 6 hours for individual # jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos. - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar + # curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar - echo "Untarring set 1 NCAR/CESM standard test files" - tar -xvf convective_transition_diag_obs_data.tar - tar -xvf EOF_500hPa_obs_data.tar + echo "Untarring set 1a NCAR/CESM standard test files" + # tar -xvf EOF_500hPa_obs_data.tar tar -xvf precip_diurnal_cycle_obs_data.tar - tar -xvf MJO_teleconnection_obs_data.tar - tar -xvf MJO_suite_obs_data.tar tar -xvf Wheeler_Kiladis_obs_data.tar # clean up tarballs rm -f *.tar - - name: Run diagnostic tests set 1 + - name: Run diagnostic tests set 1a run: | - echo "POD_OUTPUT is: " + echo "POD_OUTPUT=$(echo $PWD/../wkdir)" >> $GITHUB_ENV + echo "POD_OUTPUT is " echo "${POD_OUTPUT}" micromamba activate _MDTF_base # trivial check that install script worked ./mdtf_framework.py --help # run the test PODs - ./mdtf -f ${{matrix.json-file}} + ./mdtf -f ${{matrix.json-file-1a}} # Debug POD log(s) # cat ${POD_OUTPUT}/MDTF_NCAR.Synthetic_1975_1981/Wheeler_Kiladis/Wheeler_Kiladis.log + - name: Get observational data for set 1b + run: | + # clean up data from previous runs + echo "deleting data from set 1a" + cd ../wkdir + rm -rf * + cd ../inputdata/obs_data + rm -rf * + cd ../../ + curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" 
ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar + curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar + curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar + tar -xvf MJO_teleconnection_obs_data.tar + tar -xvf MJO_suite_obs_data.tar + tar -xvf convective_transition_diag_obs_data.tar + # clean up tarballs + rm -f *.tar + - name: Run diagnostic tests set 1b + run: | + ./mdtf -f ${{matrix.json-file-1b}} - name: Get observational data for set 2 run: | echo "${PWD}" # remove data from previous run # Actions moves you to the root repo directory in every step, so need to cd again + echo "deleting data from set 1b" + cd ../wkdir + rm -rf * cd ../inputdata/obs_data - echo "deleting obs data from set 1" rm -rf * cd ../../ echo "Available Space" @@ -160,9 +170,7 @@ jobs: rm -f *.tar - name: Run diagnostic tests set 2 run: | - micromamba activate _MDTF_base - # run the test PODs - ./mdtf -f ${{matrix.json-file-set2}} + ./mdtf -f ${{matrix.json-file-2}} # Uncomment the following line for debugging #cat ../wkdir/MDTF_GFDL.Synthetic_1_10/MJO_prop_amp/MJO_prop_amp.log - name: Get observational data for set 3 @@ -170,8 +178,10 @@ jobs: echo "${PWD}" # remove data from previous run # Actions moves you to the root repo directory in every step, so need to cd again + echo "deleting data from set 2" + cd ../wkdir + rm -rf * cd ../inputdata/obs_data - echo "deleting obs data from set 2" rm -rf * cd ../../ echo "Available Space" @@ -181,19 +191,19 @@ jobs: # jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos. 
#curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar + #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar # curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/albedofb_obs_data.tar --output albedofb_obs_data.tar - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/seaice_suite_obs_data.tar --output seaice_suite_obs_data.tar - curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/stc_eddy_heat_fluxes_obs_data.tar --output stc_eddy_heat_fluxes_obs_data.tar + #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/seaice_suite_obs_data.tar --output seaice_suite_obs_data.tar + #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/stc_eddy_heat_fluxes_obs_data.tar --output stc_eddy_heat_fluxes_obs_data.tar echo "Untarring set 3 CMIP standard test files" #tar -xvf temp_extremes_distshape_obs_data.tar #tar -zxvf tropical_pacific_sea_level_obs_data.tar.gz - tar -xvf mixed_layer_depth_obs_data.tar + #tar -xvf mixed_layer_depth_obs_data.tar tar -xvf ocn_surf_flux_diag_obs_data.tar # tar -xvf albedofb_obs_data.tar - tar -xvf seaice_suite_obs_data.tar - tar -xvf stc_eddy_heat_fluxes_obs_data.tar + # tar -xvf seaice_suite_obs_data.tar + # tar -xvf stc_eddy_heat_fluxes_obs_data.tar # clean up tarballs rm -f *.tar rm -f *.tar.gz @@ -201,7 +211,7 @@ jobs: run: | micromamba activate _MDTF_base # run the test PODs - ./mdtf -f ${{matrix.json-file-set3}} + ./mdtf -f ${{matrix.json-file-3}} #- name: Run unit tests # run: | # micromamba activate _MDTF_base diff --git a/README.md b/README.md index c26e9d11e..b6c221578 100644 --- a/README.md +++ b/README.md @@ -107,9 +107,7 @@ for, the Windows Subsystem for Linux. when micromamba is installed - `$MICROMAMBA_EXE` is full path to the micromamba executable on your system (e.g., /home/${USER}/.local/bin/micromamba). 
This is defined by the `MAMBA_EXE` environment variable on your system - - The `--env_dir` flag allows you to put the program files in a designated location `$CONDA_ENV_DIR` - (for space reasons, or if you don’t have write access). - You can omit this flag, and the environments will be installed within `$CONDA_ROOT/envs/` by default. + - All flags noted for your system above must be supplied for the script to work. #### NOTE: The micromamba environments may differ from the conda environments because of package compatibility discrepancies between solvers `% ./src/conda/micromamba_env_setup.sh --all --micromamba_root $MICROMAMBA_ROOT --micromamba_exe $MICROMAMBA_EXE --env_dir $CONDA_ENV_DIR` builds diff --git a/data/fieldlist_CMIP.jsonc b/data/fieldlist_CMIP.jsonc index 737ef1e09..e35f89d29 100644 --- a/data/fieldlist_CMIP.jsonc +++ b/data/fieldlist_CMIP.jsonc @@ -181,6 +181,14 @@ "standard_name": "precipitation_flux", "realm": "atmos", "units": "kg m-2 s-1", + "alternate_standard_names": ["rainfall_flux"], + "ndim": 3 + }, + "rainfall_flux": { + "standard_name": "rainfall_flux", + "realm": "seaIce", + "units": "kg m-2 s-1", + "alternate_standard_names": ["precipitation_flux"], "ndim": 3 }, "prc": { diff --git a/data/fieldlist_GFDL.jsonc b/data/fieldlist_GFDL.jsonc index 2c9ffb1fb..16c225a38 100644 --- a/data/fieldlist_GFDL.jsonc +++ b/data/fieldlist_GFDL.jsonc @@ -163,7 +163,13 @@ "realm": "atmos", "units": "1", "ndim": 3 - }, + }, + "siconc": { + "standard_name": "sea_ice_area_fraction", + "realm": "seaIce", + "units": "0-1", + "ndim": 3 + }, "IWP": { "standard_name": "atmosphere_mass_content_of_cloud_ice", "long_name": "Ice water path", @@ -191,6 +197,14 @@ "long_name":"", "realm": "atmos", "units": "kg m-2 s-1", + "alternate_standard_names": ["rainfall_flux"], + "ndim": 3 + }, + "rainfall_flux": { + "standard_name": "rainfall_flux", + "realm": "seaIce", + "units": "kg m-2 s-1", + "alternate_standard_names": ["precipitation_flux"], "ndim": 3 }, "prec_conv": { @@ -214,7 +228,7 @@ "units": "kg m-2 s-1", "ndim": 3 }, - "prw": { + "wvp": { "standard_name": "atmosphere_mass_content_of_water_vapor", "long_name": "Water Vapor Path", "realm": "atmos", diff --git a/data/gfdl-cmor-tables/gfdl_to_cmip5_vars.csv b/data/gfdl-cmor-tables/gfdl_to_cmip5_vars.csv index f20ffc3de..3a05e763a 100644 --- a/data/gfdl-cmor-tables/gfdl_to_cmip5_vars.csv +++ b/data/gfdl-cmor-tables/gfdl_to_cmip5_vars.csv @@ -202,6 +202,8 @@ dfe,dfe,mole_concentration_of_dissolved_iron_in_sea_water,Dissolved Iron Concent cfadDbze94,cfadDbze94,histogram_of_equivalent_reflectivity_factor_over_height_above_reference_ellipsoid,CloudSat Radar Reflectivity CFAD,atmos,1 dissic,dissic,mole_concentration_of_dissolved_inorganic_carbon_in_sea_water,Dissolved Inorganic Carbon Concentration,ocean_biochem,mol m-3 ua,ua,eastward_wind,Eastward Wind,atmos,m s-1 +ua200,ua200,eastward_wind,Eastward Wind,atmos,m s-1 +ua850,ua850,eastward_wind,Eastward Wind,atmos,m s-1 clhcalipso_sat,clhcalipso,cloud_area_fraction_in_atmosphere_layer,CALIPSO High Level Cloud Fraction,atmos,% qo3v,tro3,mole_fraction_of_ozone_in_air,Mole Fraction of O3,atmos,1e-9 om_emis_col,emibb,tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_due_to_emission,Total Emission of Primary Aerosol from Biomass Burning,aerosol,kg m-2 s-1 @@ -462,6 +464,8 @@ bddtdip,bddtdip,tendency_of_mole_concentration_of_dissolved_inorganic_phosphate_ hus,hus,specific_humidity,Specific Humidity,atmos,1 parasol_refl_sat,parasolRefl,toa_bidirectional_reflectance,PARASOL 
Reflectance,atmos,1 va,va,northward_wind,Northward Wind,atmos,m s-1 +va200,va200,northward_wind,Northward Wind,atmos,m s-1 +va850,va850,northward_wind,Northward Wind,atmos,m s-1 fl_ccsnow,prsnc,convective_snowfall_flux,Convective Snowfall Flux,atmos,kg m-2 s-1 zostoga,zostoga,global_average_thermosteric_sea_level_change,Global Average Thermosteric Sea Level Change,ocean,m evap,evs,water_evaporation_flux,Water Evaporation Flux Where Ice Free Ocean over Sea,ocean,kg m-2 s-1 diff --git a/diagnostics/convective_transition_diag/convecTransBasic.py b/diagnostics/convective_transition_diag/convecTransBasic.py index f203a9c3a..bba989392 100644 --- a/diagnostics/convective_transition_diag/convecTransBasic.py +++ b/diagnostics/convective_transition_diag/convecTransBasic.py @@ -64,6 +64,7 @@ from convecTransBasic_util import convecTransBasic_calc_model from convecTransBasic_util import convecTransBasic_loadAnalyzedData from convecTransBasic_util import convecTransBasic_plot + print("**************************************************") print("Excuting Convective Transition Basic Statistics (convecTransBasic.py)......") print("**************************************************") @@ -77,8 +78,8 @@ print("Load user-specified binning parameters..."), # Create and read user-specified parameters -os.system("python "+ os.environ["POD_HOME"]+ "/" + "convecTransBasic_usp_calc.py") -with open(os.environ["WORK_DIR"]+"/" + "convecTransBasic_calc_parameters.json") as outfile: +os.system("python " + os.environ["POD_HOME"] + "/" + "convecTransBasic_usp_calc.py") +with open(os.environ["WORK_DIR"] + "/" + "convecTransBasic_calc_parameters.json") as outfile: bin_data = json.load(outfile) print("...Loaded!") @@ -108,15 +109,15 @@ + ") will be saved to " + bin_data["PREPROCESSING_OUTPUT_DIR"] + "/") # Load & pre-process region mask - REGION=generate_region_mask(bin_data["REGION_MASK_DIR"] + "/" + bin_data["REGION_MASK_FILENAME"], - bin_data["pr_list"][0], bin_data["LAT_VAR"], bin_data["LON_VAR"]) + REGION = generate_region_mask(bin_data["REGION_MASK_DIR"] + "/" + bin_data["REGION_MASK_FILENAME"], + bin_data["pr_list"][0], bin_data["LAT_VAR"], bin_data["LON_VAR"]) # Pre-process temperature (if necessary) & bin & save binned results - binned_output=convecTransBasic_calc_model(REGION, bin_data["args1"]) + binned_output = convecTransBasic_calc_model(REGION, bin_data["args1"]) else: # Binned data file exists & BIN_ANYWAY=False print("Binned output detected..."), - binned_output=convecTransBasic_loadAnalyzedData(bin_data["args2"]) + binned_output = convecTransBasic_loadAnalyzedData(bin_data["args2"]) print("...Loaded!") # ====================================================================== diff --git a/diagnostics/convective_transition_diag/convecTransBasic_usp_plot.py b/diagnostics/convective_transition_diag/convecTransBasic_usp_plot.py index e59b93ca7..cff6fd95c 100644 --- a/diagnostics/convective_transition_diag/convecTransBasic_usp_plot.py +++ b/diagnostics/convective_transition_diag/convecTransBasic_usp_plot.py @@ -14,7 +14,7 @@ import glob with open(os.environ["WORK_DIR"] + "/" + "convecTransBasic_calc_parameters.json") as outfile: - bin_data=json.load(outfile) + bin_data = json.load(outfile) # ====================================================================== # START USER SPECIFIED SECTION @@ -174,7 +174,7 @@ bin_data["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"] ] -data["args4"] = [ bin_data["CWV_BIN_WIDTH"], PDF_THRESHOLD, CWV_RANGE_THRESHOLD, +data["args4"] = [bin_data["CWV_BIN_WIDTH"], PDF_THRESHOLD, 
CWV_RANGE_THRESHOLD, CP_THRESHOLD, bin_data["MODEL"], bin_data["REGION_STR"], bin_data["NUMBER_OF_REGIONS"], bin_data["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"], bin_data["PRECIP_THRESHOLD"], FIG_OUTPUT_DIR, FIG_OUTPUT_FILENAME, diff --git a/diagnostics/convective_transition_diag/convecTransBasic_util.py b/diagnostics/convective_transition_diag/convecTransBasic_util.py index 27d342892..4f14a618b 100644 --- a/diagnostics/convective_transition_diag/convecTransBasic_util.py +++ b/diagnostics/convective_transition_diag/convecTransBasic_util.py @@ -29,6 +29,7 @@ import matplotlib.cm as cm import networkx + # ====================================================================== # convecTransBasic_binTave # takes arguments and bins by CWV & tave bins @@ -38,22 +39,24 @@ def convecTransBasic_binTave(lon_idx, CWV_BIN_WIDTH, NUMBER_OF_REGIONS, NUMBER_T NUMBER_CWV_BIN, PRECIP_THRESHOLD, REGION, CWV, RAIN, temp, QSAT_INT, p0, p1, p2, pe, q0, q1): for lat_idx in numpy.arange(CWV.shape[1]): - reg = REGION[lon_idx,lat_idx] + reg = REGION[lon_idx, lat_idx] if reg > 0 and reg <= NUMBER_OF_REGIONS: - cwv_idx=CWV[:,lat_idx,lon_idx] - rain=RAIN[:,lat_idx,lon_idx] - temp_idx=temp[:,lat_idx,lon_idx] - qsat_int=QSAT_INT[:,lat_idx,lon_idx] + cwv_idx = CWV[:, lat_idx, lon_idx] + rain = RAIN[:, lat_idx, lon_idx] + temp_idx = temp[:, lat_idx, lon_idx] + qsat_int = QSAT_INT[:, lat_idx, lon_idx] for time_idx in numpy.arange(CWV.shape[0]): - if (temp_idx[time_idx]<NUMBER_TEMP_BIN and temp_idx[time_idx]>=0 and cwv_idx[time_idx]<NUMBER_CWV_BIN): - p0[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=1 - p1[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=rain[time_idx] - p2[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=rain[time_idx]**2 - if (rain[time_idx]>PRECIP_THRESHOLD): - pe[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=1 - if (cwv_idx[time_idx]+1>(0.6/CWV_BIN_WIDTH)*qsat_int[time_idx]): - q0[reg-1,temp_idx[time_idx]]+=1 - q1[reg-1,temp_idx[time_idx]]+=qsat_int[time_idx] + if (temp_idx[time_idx] < NUMBER_TEMP_BIN and temp_idx[time_idx] >= 0 and cwv_idx[ time_idx] < NUMBER_CWV_BIN): + p0[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += 1 + p1[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += rain[time_idx] + p2[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += rain[time_idx] ** 2 + if rain[time_idx] > PRECIP_THRESHOLD: + pe[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += 1 + if cwv_idx[time_idx] + 1 > (0.6 / CWV_BIN_WIDTH) * qsat_int[time_idx]: + q0[reg - 1, temp_idx[time_idx]] += 1 + q1[reg - 1, temp_idx[time_idx]] += qsat_int[time_idx] + # ====================================================================== # convecTransBasic_binQsatInt @@ -63,18 +66,20 @@ def convecTransBasic_binTave(lon_idx, CWV_BIN_WIDTH, NUMBER_OF_REGIONS, NUMBER_T def convecTransBasic_binQsatInt(lon_idx, NUMBER_OF_REGIONS, NUMBER_TEMP_BIN, NUMBER_CWV_BIN, PRECIP_THRESHOLD, REGION, CWV, RAIN, temp, p0, p1, p2, pe): for lat_idx in numpy.arange(CWV.shape[1]): - reg=REGION[lon_idx,lat_idx] - if (reg>0 and reg<=NUMBER_OF_REGIONS): - cwv_idx=CWV[:,lat_idx,lon_idx] - rain=RAIN[:,lat_idx,lon_idx] - temp_idx=temp[:,lat_idx,lon_idx] + reg = REGION[lon_idx, lat_idx] + if reg > 0 and reg <= NUMBER_OF_REGIONS: + cwv_idx = CWV[:, lat_idx, lon_idx] + rain = RAIN[:, lat_idx, lon_idx] + temp_idx = temp[:, lat_idx, lon_idx] for time_idx in numpy.arange(CWV.shape[0]): - if (temp_idx[time_idx]<NUMBER_TEMP_BIN and temp_idx[time_idx]>=0 and cwv_idx[time_idx]<NUMBER_CWV_BIN): - p0[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=1 - p1[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=rain[time_idx] - p2[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=rain[time_idx]**2 - if (rain[time_idx]>PRECIP_THRESHOLD): - pe[reg-1,cwv_idx[time_idx],temp_idx[time_idx]]+=1 + if (temp_idx[time_idx] < NUMBER_TEMP_BIN and temp_idx[time_idx] >= 0 and cwv_idx[ time_idx] < NUMBER_CWV_BIN): + p0[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += 1 + p1[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += rain[time_idx] + p2[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += rain[time_idx] ** 2 + if (rain[time_idx] > 
PRECIP_THRESHOLD): + pe[reg - 1, cwv_idx[time_idx], temp_idx[time_idx]] += 1 + # ====================================================================== # generate_region_mask @@ -94,43 +99,43 @@ def generate_region_mask(region_mask_filename, # Load & Pre-process Region Mask print("Generating region mask...") matfile = scipy.io.loadmat(region_mask_filename) - lat_m=matfile["lat"] - lon_m=matfile["lon"] # 0.125~359.875 deg - region=matfile["region"] - lon_m=numpy.append(lon_m,numpy.reshape(lon_m[0,:],(-1,1))+360,0) - lon_m=numpy.append(numpy.reshape(lon_m[-2,:],(-1,1))-360,lon_m,0) - region=numpy.append(region,numpy.reshape(region[0,:],(-1,lat_m.size)),0) - region=numpy.append(numpy.reshape(region[-2,:],(-1,lat_m.size)),region,0) + lat_m = matfile["lat"] + lon_m = matfile["lon"] # 0.125~359.875 deg + region = matfile["region"] + lon_m = numpy.append(lon_m, numpy.reshape(lon_m[0, :], (-1, 1)) + 360, 0) + lon_m = numpy.append(numpy.reshape(lon_m[-2, :], (-1, 1)) - 360, lon_m, 0) + region = numpy.append(region, numpy.reshape(region[0, :], (-1, lat_m.size)), 0) + region = numpy.append(numpy.reshape(region[-2, :], (-1, lat_m.size)), region, 0) - LAT,LON=numpy.meshgrid(lat_m,lon_m,sparse=False,indexing="xy") - LAT=numpy.reshape(LAT,(-1,1)) - LON=numpy.reshape(LON,(-1,1)) - REGION=numpy.reshape(region,(-1,1)) + LAT, LON = numpy.meshgrid(lat_m, lon_m, sparse=False, indexing="xy") + LAT = numpy.reshape(LAT, (-1, 1)) + LON = numpy.reshape(LON, (-1, 1)) + REGION = numpy.reshape(region, (-1, 1)) - LATLON=numpy.squeeze(numpy.array((LAT,LON))) - LATLON=LATLON.transpose() + LATLON = numpy.squeeze(numpy.array((LAT, LON))) + LATLON = LATLON.transpose() - regMaskInterpolator=NearestNDInterpolator(LATLON,REGION) + regMaskInterpolator = NearestNDInterpolator(LATLON, REGION) # Interpolate Region Mask onto Model Grid using Nearest Grid Value - pr_netcdf=Dataset(model_netcdf_filename,"r") - lon=numpy.asarray(pr_netcdf.variables[lon_var][:],dtype="float") - lat=numpy.asarray(pr_netcdf.variables[lat_var][:],dtype="float") + pr_netcdf = Dataset(model_netcdf_filename, "r") + lon = numpy.asarray(pr_netcdf.variables[lon_var][:], dtype="float") + lat = numpy.asarray(pr_netcdf.variables[lat_var][:], dtype="float") pr_netcdf.close() - if lon[lon<0.0].size>0: - lon[lon[lon<0.0]] += 360.0 - lat=lat[numpy.logical_and(lat >= -20.0, lat <= 20.0)] - - LAT,LON=numpy.meshgrid(lat,lon,sparse=False,indexing="xy") - LAT=numpy.reshape(LAT,(-1,1)) - LON=numpy.reshape(LON,(-1,1)) - LATLON=numpy.squeeze(numpy.array((LAT,LON))) - LATLON=LATLON.transpose() - REGION=numpy.zeros(LAT.size) + if lon[lon < 0.0].size > 0: + lon[lon[lon < 0.0]] += 360.0 + lat = lat[numpy.logical_and(lat >= -20.0, lat <= 20.0)] + + LAT, LON = numpy.meshgrid(lat, lon, sparse=False, indexing="xy") + LAT = numpy.reshape(LAT, (-1, 1)) + LON = numpy.reshape(LON, (-1, 1)) + LATLON = numpy.squeeze(numpy.array((LAT, LON))) + LATLON = LATLON.transpose() + REGION = numpy.zeros(LAT.size) for latlon_idx in numpy.arange(REGION.shape[0]): - REGION[latlon_idx]=regMaskInterpolator(LATLON[latlon_idx,:]) - REGION=numpy.reshape(REGION.astype(int),(-1,lat.size)) - + REGION[latlon_idx] = regMaskInterpolator(LATLON[latlon_idx, :]) + REGION = numpy.reshape(REGION.astype(int), (-1, lat.size)) + print("...Generated!") return REGION @@ -140,6 +145,7 @@ def generate_region_mask(region_mask_filename, # mp.contourf(lon.squeeze(), lat.squeeze(), REGION.T) # mp.axes().set_aspect('equal') + # ====================================================================== # 
convecTransBasic_calcTaveQsatInt # takes in 3D tropospheric temperature fields and calculates tave & qsat_int @@ -148,289 +154,295 @@ def generate_region_mask(region_mask_filename, # Definition of column can be changed through p_lev_bottom & p_lev_top, # but the default filenames for tave & qsat_int do not contain column info -def convecTransBasic_calcTaveQsatInt(ta_netcdf_filename,TA_VAR,PRES_VAR,MODEL,\ - p_lev_bottom,p_lev_top,dp,time_idx_delta,\ - SAVE_TAVE_QSAT_INT,PREPROCESSING_OUTPUT_DIR,\ - TAVE_VAR,QSAT_INT_VAR,TIME_VAR,LAT_VAR,LON_VAR): +def convecTransBasic_calcTaveQsatInt(ta_netcdf_filename, TA_VAR, PRES_VAR, MODEL, + p_lev_bottom, p_lev_top, dp, time_idx_delta, + SAVE_TAVE_QSAT_INT, PREPROCESSING_OUTPUT_DIR, + TAVE_VAR, QSAT_INT_VAR, TIME_VAR, LAT_VAR, LON_VAR): # Constants for calculating saturation vapor pressure - Tk0 = 273.15 # Reference temperature. - Es0 = 610.7 # Vapor pressure [Pa] at Tk0. - Lv0 = 2500800 # Latent heat of evaporation at Tk0. - cpv = 1869.4 # Isobaric specific heat capacity of water vapor at tk0. - cl = 4218.0 # Specific heat capacity of liquid water at tk0. - R = 8.3144 # Universal gas constant. - Mw = 0.018015 # Molecular weight of water. - Rv = R/Mw # Gas constant for water vapor. - Ma = 0.028964 # Molecular weight of dry air. - Rd = R/Ma # Gas constant for dry air. - epsilon = Mw/Ma + Tk0 = 273.15 # Reference temperature. + Es0 = 610.7 # Vapor pressure [Pa] at Tk0. + Lv0 = 2500800 # Latent heat of evaporation at Tk0. + cpv = 1869.4 # Isobaric specific heat capacity of water vapor at tk0. + cl = 4218.0 # Specific heat capacity of liquid water at tk0. + R = 8.3144 # Universal gas constant. + Mw = 0.018015 # Molecular weight of water. + Rv = R / Mw # Gas constant for water vapor. + Ma = 0.028964 # Molecular weight of dry air. + Rd = R / Ma # Gas constant for dry air. 
+ epsilon = Mw / Ma g = 9.80665 # Calculate tave & qsat_int # Column: 1000-200mb (+/- dp mb) - ta_netcdf=Dataset(ta_netcdf_filename,"r") - lat=numpy.asarray(ta_netcdf.variables[LAT_VAR][:],dtype="float") - pfull=numpy.asarray(ta_netcdf.variables[PRES_VAR][:],dtype="float") - if (max(pfull)>2000): # If units: Pa - pfull*=0.01 - FLIP_PRES=(pfull[1]-pfull[0]<0) + ta_netcdf = Dataset(ta_netcdf_filename, "r") + lat = numpy.asarray(ta_netcdf.variables[LAT_VAR][:], dtype="float") + pfull = numpy.asarray(ta_netcdf.variables[PRES_VAR][:], dtype="float") + if max(pfull) > 2000: # If units: Pa + pfull *= 0.01 + FLIP_PRES = (pfull[1] - pfull[0] < 0) if FLIP_PRES: - pfull=numpy.flipud(pfull) - tave=numpy.array([]) - qsat_int=numpy.array([]) + pfull = numpy.flipud(pfull) + tave = numpy.array([]) + qsat_int = numpy.array([]) - time_idx_start=0 + time_idx_start = 0 - print(" Pre-processing "+ta_netcdf_filename) + print(" Pre-processing " + ta_netcdf_filename) - while (time_idx_start=-20.0,lat<=20.0),:],dtype="float") - ta=numpy.fliplr(ta) + ta = numpy.asarray( + ta_netcdf.variables[TA_VAR][time_idx_start:time_idx_end, pfull.size - (p_max + 1):pfull.size - p_min, + numpy.logical_and(lat >= -20.0, lat <= 20.0), :], dtype="float") + ta = numpy.fliplr(ta) else: - ta=numpy.asarray(ta_netcdf.variables[TA_VAR][time_idx_start:time_idx_end,p_min:p_max+1,numpy.logical_and(lat>=-20.0,lat<=20.0),:],dtype="float") - time_idx_start=time_idx_end - p_max=p_max-p_min - p_min=0 + ta = numpy.asarray(ta_netcdf.variables[TA_VAR][time_idx_start:time_idx_end, p_min:p_max + 1, + numpy.logical_and(lat >= -20.0, lat <= 20.0), :], dtype="float") + time_idx_start = time_idx_end + p_max = p_max - p_min + p_min = 0 - if (plev[p_min]p_lev_bottom+dp): + if plev[p_max] > p_lev_bottom + dp: # Update plev(p_max) <-- p_lev_bottom # AND Update ta(p_max) <-- ta(p_lev_bottom) by interpolation - ta[:,p_max,:,:]=ta[:,p_max,:,:] \ - +(p_lev_bottom-plev[p_max]) \ - /(plev[p_max-1]-plev[p_max]) \ - *(ta[:,p_max-1,:,:]-ta[:,p_max,:,:]) - plev[p_max]=p_lev_bottom + ta[:, p_max, :, :] = ta[:, p_max, :, :] \ + + (p_lev_bottom - plev[p_max]) \ + / (plev[p_max - 1] - plev[p_max]) \ + * (ta[:, p_max - 1, :, :] - ta[:, p_max, :, :]) + plev[p_max] = p_lev_bottom - if (plev[p_max]=-20.0,latitude<=20.0)] + ta_netcdf = Dataset(ta_netcdf_filename, "r") + time = ta_netcdf.variables[TIME_VAR] + longitude = numpy.asarray(ta_netcdf.variables[LON_VAR][:], dtype="float") + latitude = numpy.asarray(ta_netcdf.variables[LAT_VAR][:], dtype="float") + latitude = latitude[numpy.logical_and(latitude >= -20.0, latitude <= 20.0)] # Save 1000-200mb Column Average Temperature as tave - tave_output_filename=PREPROCESSING_OUTPUT_DIR+"/"+ta_netcdf_filename.split('/')[-1].replace("."+TA_VAR+".","."+TAVE_VAR+".") - tave_output_netcdf=Dataset(tave_output_filename,"w",format="NETCDF4") - tave_output_netcdf.description=str(p_lev_bottom)+"-"+str(p_lev_top)+" hPa "\ - +"Mass-Weighted Column Average Temperature for "+MODEL - tave_output_netcdf.source="Convective Onset Statistics Diagnostic Package \ + tave_output_filename = PREPROCESSING_OUTPUT_DIR + "/" + ta_netcdf_filename.split('/')[-1].replace( + "." + TA_VAR + ".", "." 
+ TAVE_VAR + ".") + tave_output_netcdf = Dataset(tave_output_filename, "w", format="NETCDF4") + tave_output_netcdf.description = str(p_lev_bottom) + "-" + str(p_lev_top) + " hPa " \ + + "Mass-Weighted Column Average Temperature for " + MODEL + tave_output_netcdf.source = "Convective Onset Statistics Diagnostic Package\ - as part of the NOAA Model Diagnostic Task Force (MDTF) effort" - lon_dim=tave_output_netcdf.createDimension(LON_VAR,len(longitude)) - lon_val=tave_output_netcdf.createVariable(LON_VAR,numpy.float64,(LON_VAR,)) - lon_val.units="degree" - lon_val[:]=longitude + lon_dim = tave_output_netcdf.createDimension(LON_VAR, len(longitude)) + lon_val = tave_output_netcdf.createVariable(LON_VAR, numpy.float64, (LON_VAR,)) + lon_val.units = "degree" + lon_val[:] = longitude - lat_dim=tave_output_netcdf.createDimension(LAT_VAR,len(latitude)) - lat_val=tave_output_netcdf.createVariable(LAT_VAR,numpy.float64,(LAT_VAR,)) - lat_val.units="degree_north" - lat_val[:]=latitude + lat_dim = tave_output_netcdf.createDimension(LAT_VAR, len(latitude)) + lat_val = tave_output_netcdf.createVariable(LAT_VAR, numpy.float64, (LAT_VAR,)) + lat_val.units = "degree_north" + lat_val[:] = latitude - time_dim=tave_output_netcdf.createDimension(TIME_VAR,None) - time_val=tave_output_netcdf.createVariable(TIME_VAR,numpy.float64,(TIME_VAR,)) - time_val.units=time.units - time_val[:]=time[:] + time_dim = tave_output_netcdf.createDimension(TIME_VAR, None) + time_val = tave_output_netcdf.createVariable(TIME_VAR, numpy.float64, (TIME_VAR,)) + time_val.units = time.units + time_val[:] = time[:] - tave_val=tave_output_netcdf.createVariable(TAVE_VAR,numpy.float64,(TIME_VAR,LAT_VAR,LON_VAR)) - tave_val.units="K" - tave_val[:,:,:]=tave + tave_val = tave_output_netcdf.createVariable(TAVE_VAR, numpy.float64, (TIME_VAR, LAT_VAR, LON_VAR)) + tave_val.units = "K" + tave_val[:, :, :] = tave tave_output_netcdf.close() - print(' '+tave_output_filename+" saved!") + print(' ' + tave_output_filename + " saved!") # Save 1000-200mb Column-integrated Saturation Specific Humidity as qsat_int - qsat_int_output_filename=PREPROCESSING_OUTPUT_DIR+"/"+ta_netcdf_filename.split('/')[-1].replace("."+TA_VAR+".","."+QSAT_INT_VAR+".") - qsat_int_output_netcdf=Dataset(qsat_int_output_filename,"w",format="NETCDF4") - qsat_int_output_netcdf.description=str(p_lev_bottom)+"-"+str(p_lev_top)+" hPa "\ - +"Column-integrated Saturation Specific Humidity for "+MODEL - qsat_int_output_netcdf.source="Convective Onset Statistics Diagnostic Package \ + qsat_int_output_filename = PREPROCESSING_OUTPUT_DIR + "/" + ta_netcdf_filename.split('/')[-1].replace( + "." + TA_VAR + ".", "." 
+ QSAT_INT_VAR + ".") + qsat_int_output_netcdf = Dataset(qsat_int_output_filename, "w", format="NETCDF4") + qsat_int_output_netcdf.description = str(p_lev_bottom) + "-" + str(p_lev_top) + " hPa " \ + + "Column-integrated Saturation Specific Humidity for " + MODEL + qsat_int_output_netcdf.source = "Convective Onset Statistics Diagnostic Package \ - as part of the NOAA Model Diagnostic Task Force (MDTF) effort" - lon_dim=qsat_int_output_netcdf.createDimension(LON_VAR,len(longitude)) - lon_val=qsat_int_output_netcdf.createVariable(LON_VAR,numpy.float64,(LON_VAR,)) - lon_val.units="degree" - lon_val[:]=longitude + lon_dim = qsat_int_output_netcdf.createDimension(LON_VAR, len(longitude)) + lon_val = qsat_int_output_netcdf.createVariable(LON_VAR, numpy.float64, (LON_VAR,)) + lon_val.units = "degree" + lon_val[:] = longitude - lat_dim=qsat_int_output_netcdf.createDimension(LAT_VAR,len(latitude)) - lat_val=qsat_int_output_netcdf.createVariable(LAT_VAR,numpy.float64,(LAT_VAR,)) - lat_val.units="degree_north" - lat_val[:]=latitude + lat_dim = qsat_int_output_netcdf.createDimension(LAT_VAR, len(latitude)) + lat_val = qsat_int_output_netcdf.createVariable(LAT_VAR, numpy.float64, (LAT_VAR,)) + lat_val.units = "degree_north" + lat_val[:] = latitude - time_dim=qsat_int_output_netcdf.createDimension(TIME_VAR,None) - time_val=qsat_int_output_netcdf.createVariable(TIME_VAR,numpy.float64,(TIME_VAR,)) - time_val.units=time.units - time_val[:]=time[:] + time_dim = qsat_int_output_netcdf.createDimension(TIME_VAR, None) + time_val = qsat_int_output_netcdf.createVariable(TIME_VAR, numpy.float64, (TIME_VAR,)) + time_val.units = time.units + time_val[:] = time[:] - qsat_int_val=qsat_int_output_netcdf.createVariable(QSAT_INT_VAR,numpy.float64,(TIME_VAR,LAT_VAR,LON_VAR)) - qsat_int_val.units="mm" - qsat_int_val[:,:,:]=qsat_int + qsat_int_val = qsat_int_output_netcdf.createVariable(QSAT_INT_VAR, numpy.float64, (TIME_VAR, LAT_VAR, LON_VAR)) + qsat_int_val.units = "mm" + qsat_int_val[:, :, :] = qsat_int qsat_int_output_netcdf.close() - print(' '+qsat_int_output_filename+" saved!") + print(' ' + qsat_int_output_filename + " saved!") ta_netcdf.close() # End-if SAVE_TAVE_QSAT_INT==1 return tave, qsat_int + # ====================================================================== # convecTransBasic_calc_model # takes in ALL 2D pre-processed fields (precip, CWV, and EITHER tave or qsat_int), # calculates the binned data, and save it as a netCDF file # in the var_data/convective_transition_diag directory -def convecTransBasic_calc_model(REGION,*argsv): +def convecTransBasic_calc_model(REGION, *argsv): # ALLOCATE VARIABLES FOR EACH ARGUMENT - + BULK_TROPOSPHERIC_TEMPERATURE_MEASURE, \ - CWV_BIN_WIDTH, \ - CWV_RANGE_MAX, \ - T_RANGE_MIN, \ - T_RANGE_MAX, \ - T_BIN_WIDTH, \ - Q_RANGE_MIN, \ - Q_RANGE_MAX, \ - Q_BIN_WIDTH, \ - NUMBER_OF_REGIONS, \ - pr_list, \ - PR_VAR, \ - prw_list, \ - PRW_VAR, \ - PREPROCESS_TA, \ - MODEL_OUTPUT_DIR, \ - qsat_int_list, \ - QSAT_INT_VAR, \ - tave_list, \ - TAVE_VAR, \ - ta_list, \ - TA_VAR, \ - PRES_VAR, \ - MODEL, \ - p_lev_bottom, \ - p_lev_top, \ - dp, \ - time_idx_delta, \ - SAVE_TAVE_QSAT_INT, \ - PREPROCESSING_OUTPUT_DIR, \ - PRECIP_THRESHOLD, \ - BIN_OUTPUT_DIR, \ - BIN_OUTPUT_FILENAME, \ - TIME_VAR, \ - LAT_VAR, \ - LON_VAR = argsv[0] + CWV_BIN_WIDTH, \ + CWV_RANGE_MAX, \ + T_RANGE_MIN, \ + T_RANGE_MAX, \ + T_BIN_WIDTH, \ + Q_RANGE_MIN, \ + Q_RANGE_MAX, \ + Q_BIN_WIDTH, \ + NUMBER_OF_REGIONS, \ + pr_list, \ + PR_VAR, \ + prw_list, \ + PRW_VAR, \ + PREPROCESS_TA, \ + MODEL_OUTPUT_DIR, \ + 
qsat_int_list, \ + QSAT_INT_VAR, \ + tave_list, \ + TAVE_VAR, \ + ta_list, \ + TA_VAR, \ + PRES_VAR, \ + MODEL, \ + p_lev_bottom, \ + p_lev_top, \ + dp, \ + time_idx_delta, \ + SAVE_TAVE_QSAT_INT, \ + PREPROCESSING_OUTPUT_DIR, \ + PRECIP_THRESHOLD, \ + BIN_OUTPUT_DIR, \ + BIN_OUTPUT_FILENAME, \ + TIME_VAR, \ + LAT_VAR, \ + LON_VAR = argsv[0] # Pre-process temperature field if necessary if PREPROCESS_TA == 1: print(" Start pre-processing atmospheric temperature fields...") for li in numpy.arange(len(pr_list)): - convecTransBasic_calcTaveQsatInt(ta_list[li],TA_VAR,PRES_VAR,MODEL, - p_lev_bottom,p_lev_top,dp,time_idx_delta, - SAVE_TAVE_QSAT_INT,PREPROCESSING_OUTPUT_DIR, - TAVE_VAR,QSAT_INT_VAR,TIME_VAR,LAT_VAR,LON_VAR) + convecTransBasic_calcTaveQsatInt(ta_list[li], TA_VAR, PRES_VAR, MODEL, + p_lev_bottom, p_lev_top, dp, time_idx_delta, + SAVE_TAVE_QSAT_INT, PREPROCESSING_OUTPUT_DIR, + TAVE_VAR, QSAT_INT_VAR, TIME_VAR, LAT_VAR, LON_VAR) # Re-load file lists for tave & qsat_int - tave_list = sorted(glob.glob(PREPROCESSING_OUTPUT_DIR+"/"+os.environ["tave_file"])) - qsat_int_list = sorted(glob.glob(PREPROCESSING_OUTPUT_DIR+"/"+os.environ["qsat_int_file"])) - + tave_list = sorted(glob.glob(PREPROCESSING_OUTPUT_DIR + "/" + os.environ["tave_file"])) + qsat_int_list = sorted(glob.glob(PREPROCESSING_OUTPUT_DIR + "/" + os.environ["qsat_int_file"])) + # Allocate Memory for Arrays for Binning Output - + # Define Bin Centers cwv_bin_center = numpy.arange(CWV_BIN_WIDTH, CWV_RANGE_MAX + CWV_BIN_WIDTH, CWV_BIN_WIDTH) - + # Bulk Tropospheric Temperature Measure (1:tave, or 2:qsat_int) if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - tave_bin_center=numpy.arange(T_RANGE_MIN, T_RANGE_MAX+T_BIN_WIDTH, T_BIN_WIDTH) - temp_bin_center=tave_bin_center - temp_bin_width=T_BIN_WIDTH + tave_bin_center = numpy.arange(T_RANGE_MIN, T_RANGE_MAX + T_BIN_WIDTH, T_BIN_WIDTH) + temp_bin_center = tave_bin_center + temp_bin_width = T_BIN_WIDTH elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - qsat_int_bin_center=numpy.arange(Q_RANGE_MIN, Q_RANGE_MAX+Q_BIN_WIDTH, Q_BIN_WIDTH) - temp_bin_center=qsat_int_bin_center - temp_bin_width=Q_BIN_WIDTH - + qsat_int_bin_center = numpy.arange(Q_RANGE_MIN, Q_RANGE_MAX + Q_BIN_WIDTH, Q_BIN_WIDTH) + temp_bin_center = qsat_int_bin_center + temp_bin_width = Q_BIN_WIDTH + NUMBER_CWV_BIN = cwv_bin_center.size NUMBER_TEMP_BIN = temp_bin_center.size - temp_offset = temp_bin_center[0]-0.5*temp_bin_width + temp_offset = temp_bin_center[0] - 0.5 * temp_bin_width # Allocate Memory for Arrays - P0=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_CWV_BIN,NUMBER_TEMP_BIN)) - P1=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_CWV_BIN,NUMBER_TEMP_BIN)) - P2=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_CWV_BIN,NUMBER_TEMP_BIN)) - PE=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_CWV_BIN,NUMBER_TEMP_BIN)) - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - Q0=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_TEMP_BIN)) - Q1=numpy.zeros((NUMBER_OF_REGIONS,NUMBER_TEMP_BIN)) + P0 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + P1 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + P2 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + PE = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + Q0 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) + Q1 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) # Binning by calling convecTransBasic_binTave or convecTransBasic_binQsatInt @@ -438,68 +450,68 @@ def convecTransBasic_calc_model(REGION,*argsv): 
for li in numpy.arange(len(pr_list)): - pr_netcdf=Dataset(pr_list[li],"r") - lat=numpy.asarray(pr_netcdf.variables[LAT_VAR][:],dtype="float") - pr=numpy.squeeze(numpy.asarray(pr_netcdf.variables[PR_VAR][:, :, :], dtype="float")) + pr_netcdf = Dataset(pr_list[li], "r") + lat = numpy.asarray(pr_netcdf.variables[LAT_VAR][:], dtype="float") + pr = numpy.squeeze(numpy.asarray(pr_netcdf.variables[PR_VAR][:, :, :], dtype="float")) pr_netcdf.close() # Units: mm/s --> mm/h - pr=pr[:,numpy.logical_and(lat>=-20.0,lat<=20.0),:]*3.6e3 - print(" "+pr_list[li]+" Loaded!") + pr = pr[:, numpy.logical_and(lat >= -20.0, lat <= 20.0), :] * 3.6e3 + print(" " + pr_list[li] + " Loaded!") prw_netcdf = Dataset(prw_list[li], "r") lat = numpy.asarray(prw_netcdf.variables[LAT_VAR][:], dtype="float") prw = numpy.squeeze(numpy.asarray(prw_netcdf.variables[PRW_VAR][:, :, :], dtype="float")) prw_netcdf.close() prw = prw[:, numpy.logical_and(lat >= -20.0, lat <= 20.0), :] - print(" "+prw_list[li]+" Loaded!") - - qsat_int_netcdf=Dataset(qsat_int_list[li],"r") - lat=numpy.asarray(qsat_int_netcdf.variables[LAT_VAR][:], dtype="float") - qsat_int=numpy.squeeze(numpy.asarray(qsat_int_netcdf.variables[QSAT_INT_VAR][:, :, :], dtype="float")) + print(" " + prw_list[li] + " Loaded!") + + qsat_int_netcdf = Dataset(qsat_int_list[li], "r") + lat = numpy.asarray(qsat_int_netcdf.variables[LAT_VAR][:], dtype="float") + qsat_int = numpy.squeeze(numpy.asarray(qsat_int_netcdf.variables[QSAT_INT_VAR][:, :, :], dtype="float")) qsat_int_netcdf.close() - qsat_int=qsat_int[:,numpy.logical_and(lat>=-20.0, lat<=20.0),:] - - print(" "+qsat_int_list[li]+" Loaded!") - + qsat_int = qsat_int[:, numpy.logical_and(lat >= -20.0, lat <= 20.0), :] + + print(" " + qsat_int_list[li] + " Loaded!") + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - tave_netcdf=Dataset(tave_list[li], "r") - lat=numpy.asarray(tave_netcdf.variables[LAT_VAR][:], dtype="float") - tave=numpy.squeeze(numpy.asarray(tave_netcdf.variables[TAVE_VAR][:, :, :], dtype="float")) + tave_netcdf = Dataset(tave_list[li], "r") + lat = numpy.asarray(tave_netcdf.variables[LAT_VAR][:], dtype="float") + tave = numpy.squeeze(numpy.asarray(tave_netcdf.variables[TAVE_VAR][:, :, :], dtype="float")) tave_netcdf.close() - tave=tave[:, numpy.logical_and(lat>=-20.0, lat<=20.0),:] - - print(" "+tave_list[li]+" Loaded!") - + tave = tave[:, numpy.logical_and(lat >= -20.0, lat <= 20.0), :] + + print(" " + tave_list[li] + " Loaded!") + print(" Binning..."), # Start binning - CWV=prw/CWV_BIN_WIDTH-0.5 - CWV=CWV.astype(int) - RAIN=pr - - RAIN[RAIN<0] = 0 # Sometimes models produce negative rain rates + CWV = prw / CWV_BIN_WIDTH - 0.5 + CWV = CWV.astype(int) + RAIN = pr + + RAIN[RAIN < 0] = 0 # Sometimes models produce negative rain rates QSAT_INT = qsat_int if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: TAVE = tave - temp = (TAVE-temp_offset)/temp_bin_width + temp = (TAVE - temp_offset) / temp_bin_width elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - temp = (QSAT_INT-temp_offset)/temp_bin_width + temp = (QSAT_INT - temp_offset) / temp_bin_width temp = temp.astype(int) # Binning is structured in the following way to avoid potential round-off issue # (an issue arise when the total number of events reaches about 1e+8) for lon_idx in numpy.arange(CWV.shape[2]): - p0=numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) - p1=numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) - p2=numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) - pe=numpy.zeros((NUMBER_OF_REGIONS, 
NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + p0 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + p1 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + p2 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) + pe = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_CWV_BIN, NUMBER_TEMP_BIN)) if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - q0=numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) - q1=numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) + q0 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) + q1 = numpy.zeros((NUMBER_OF_REGIONS, NUMBER_TEMP_BIN)) convecTransBasic_binTave(lon_idx, CWV_BIN_WIDTH, - NUMBER_OF_REGIONS, NUMBER_TEMP_BIN, NUMBER_CWV_BIN, PRECIP_THRESHOLD, - REGION, CWV, RAIN, temp, QSAT_INT, - p0, p1, p2, pe, q0, q1) + NUMBER_OF_REGIONS, NUMBER_TEMP_BIN, NUMBER_CWV_BIN, PRECIP_THRESHOLD, + REGION, CWV, RAIN, temp, QSAT_INT, + p0, p1, p2, pe, q0, q1) elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: convecTransBasic_binQsatInt(lon_idx, NUMBER_OF_REGIONS, NUMBER_TEMP_BIN, NUMBER_CWV_BIN, PRECIP_THRESHOLD, @@ -509,87 +521,88 @@ def convecTransBasic_calc_model(REGION,*argsv): P2 += p2 PE += pe if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - Q0+=q0 - Q1+=q1 + Q0 += q0 + Q1 += q1 # end-for lon_idx print("...Complete for current files!") - + print(" Total binning complete!") # Save Binning Results - bin_output_netcdf = Dataset(BIN_OUTPUT_DIR +" /" + BIN_OUTPUT_FILENAME+".nc", "w", format="NETCDF4") - - bin_output_netcdf.description = "Convective Onset Statistics for "+MODEL + bin_output_netcdf = Dataset(BIN_OUTPUT_DIR + "/" + BIN_OUTPUT_FILENAME + ".nc", "w", format="NETCDF4") + + bin_output_netcdf.description = "Convective Onset Statistics for " + MODEL bin_output_netcdf.source = "Convective Onset Statistics Diagnostic Package \ - as part of the NOAA Model Diagnostic Task Force (MDTF) effort" bin_output_netcdf.PRECIP_THRESHOLD = PRECIP_THRESHOLD region = bin_output_netcdf.createDimension("region", NUMBER_OF_REGIONS) reg = bin_output_netcdf.createVariable("region", numpy.float64, ("region",)) - reg = numpy.arange(1,NUMBER_OF_REGIONS+1) + reg = numpy.arange(1, NUMBER_OF_REGIONS + 1) cwv = bin_output_netcdf.createDimension("cwv", len(cwv_bin_center)) - prw=bin_output_netcdf.createVariable("cwv", numpy.float64, ("cwv", )) - prw.units="mm" + prw = bin_output_netcdf.createVariable("cwv", numpy.float64, ("cwv",)) + prw.units = "mm" prw[:] = cwv_bin_center if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - tave=bin_output_netcdf.createDimension(TAVE_VAR,len(tave_bin_center)) - temp=bin_output_netcdf.createVariable(TAVE_VAR,numpy.float64,(TAVE_VAR,)) - temp.units="K" - temp[:]=tave_bin_center + tave = bin_output_netcdf.createDimension(TAVE_VAR, len(tave_bin_center)) + temp = bin_output_netcdf.createVariable(TAVE_VAR, numpy.float64, (TAVE_VAR,)) + temp.units = "K" + temp[:] = tave_bin_center - p0=bin_output_netcdf.createVariable("P0",numpy.float64,("region","cwv",TAVE_VAR)) - p0[:,:,:]=P0 + p0 = bin_output_netcdf.createVariable("P0", numpy.float64, ("region", "cwv", TAVE_VAR)) + p0[:, :, :] = P0 - p1=bin_output_netcdf.createVariable("P1",numpy.float64,("region","cwv",TAVE_VAR)) - p1.units="mm/h" - p1[:,:,:]=P1 + p1 = bin_output_netcdf.createVariable("P1", numpy.float64, ("region", "cwv", TAVE_VAR)) + p1.units = "mm/h" + p1[:, :, :] = P1 - p2=bin_output_netcdf.createVariable("P2",numpy.float64,("region","cwv",TAVE_VAR)) - p2.units="mm^2/h^2" - p2[:,:,:]=P2 + p2 = bin_output_netcdf.createVariable("P2", numpy.float64, ("region", "cwv", TAVE_VAR)) + p2.units 
= "mm^2/h^2" + p2[:, :, :] = P2 - pe=bin_output_netcdf.createVariable("PE",numpy.float64,("region","cwv",TAVE_VAR)) - pe[:,:,:]=PE + pe = bin_output_netcdf.createVariable("PE", numpy.float64, ("region", "cwv", TAVE_VAR)) + pe[:, :, :] = PE - q0=bin_output_netcdf.createVariable("Q0",numpy.float64,("region",TAVE_VAR)) - q0[:,:]=Q0 + q0 = bin_output_netcdf.createVariable("Q0", numpy.float64, ("region", TAVE_VAR)) + q0[:, :] = Q0 - q1=bin_output_netcdf.createVariable("Q1",numpy.float64,("region",TAVE_VAR)) - q1.units="mm" - q1[:,:]=Q1 + q1 = bin_output_netcdf.createVariable("Q1", numpy.float64, ("region", TAVE_VAR)) + q1.units = "mm" + q1[:, :] = Q1 elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: qsat_int = bin_output_netcdf.createDimension(QSAT_INT_VAR, len(qsat_int_bin_center)) - temp=bin_output_netcdf.createVariable(QSAT_INT_VAR, numpy.float64, (QSAT_INT_VAR,)) + temp = bin_output_netcdf.createVariable(QSAT_INT_VAR, numpy.float64, (QSAT_INT_VAR,)) temp.units = "mm" temp[:] = qsat_int_bin_center p0 = bin_output_netcdf.createVariable("P0", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) - p0[:,:,:] = P0 + p0[:, :, :] = P0 p1 = bin_output_netcdf.createVariable("P1", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) p1.units = " mm/h" p1[:, :, :] = P1 - p2=bin_output_netcdf.createVariable("P2", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) - p2.units="mm^2/h^2" - p2[:,:,:] = P2 + p2 = bin_output_netcdf.createVariable("P2", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) + p2.units = "mm^2/h^2" + p2[:, :, :] = P2 - pe=bin_output_netcdf.createVariable("PE", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) - pe[:,:,:] = PE + pe = bin_output_netcdf.createVariable("PE", numpy.float64, ("region", "cwv", QSAT_INT_VAR)) + pe[:, :, :] = PE bin_output_netcdf.close() - print(" Binned results saved as "+BIN_OUTPUT_DIR+"/"+BIN_OUTPUT_FILENAME+".nc!") + print(" Binned results saved as " + BIN_OUTPUT_DIR + "/" + BIN_OUTPUT_FILENAME + ".nc!") if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: return cwv_bin_center, tave_bin_center, P0, P1, P2, PE, Q0, Q1, CWV_BIN_WIDTH, PRECIP_THRESHOLD elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: return cwv_bin_center, qsat_int_bin_center, P0, P1, P2, PE, [], [], CWV_BIN_WIDTH, PRECIP_THRESHOLD + # ====================================================================== # convecTransBasic_loadAnalyzedData # loads the binned output calculated from convecTransBasic_calc_model @@ -598,57 +611,57 @@ def convecTransBasic_calc_model(REGION,*argsv): def convecTransBasic_loadAnalyzedData(*argsv): bin_output_list, \ - TAVE_VAR, \ - QSAT_INT_VAR, \ - BULK_TROPOSPHERIC_TEMPERATURE_MEASURE = argsv[0] - + TAVE_VAR, \ + QSAT_INT_VAR, \ + BULK_TROPOSPHERIC_TEMPERATURE_MEASURE = argsv[0] + if len(bin_output_list) != 0: - bin_output_filename=bin_output_list[0] + bin_output_filename = bin_output_list[0] if bin_output_filename.split('.')[-1] == 'nc': - bin_output_netcdf=Dataset(bin_output_filename, "r") - - cwv_bin_center=numpy.asarray(bin_output_netcdf.variables["cwv"][:],dtype="float") - P0=numpy.asarray(bin_output_netcdf.variables["P0"][:,:,:],dtype="float") - P1=numpy.asarray(bin_output_netcdf.variables["P1"][:,:,:],dtype="float") - P2=numpy.asarray(bin_output_netcdf.variables["P2"][:,:,:],dtype="float") - PE=numpy.asarray(bin_output_netcdf.variables["PE"][:,:,:],dtype="float") - PRECIP_THRESHOLD=bin_output_netcdf.getncattr("PRECIP_THRESHOLD") - if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1): - temp_bin_center=numpy.asarray(bin_output_netcdf.variables[TAVE_VAR][:],dtype="float") - 
Q0=numpy.asarray(bin_output_netcdf.variables["Q0"][:,:],dtype="float") - Q1=numpy.asarray(bin_output_netcdf.variables["Q1"][:,:],dtype="float") - elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2): - temp_bin_center=numpy.asarray(bin_output_netcdf.variables[QSAT_INT_VAR][:],dtype="float") - Q0=[] - Q1=[] - CWV_BIN_WIDTH=cwv_bin_center[1]-cwv_bin_center[0] + bin_output_netcdf = Dataset(bin_output_filename, "r") + + cwv_bin_center = numpy.asarray(bin_output_netcdf.variables["cwv"][:], dtype="float") + P0 = numpy.asarray(bin_output_netcdf.variables["P0"][:, :, :], dtype="float") + P1 = numpy.asarray(bin_output_netcdf.variables["P1"][:, :, :], dtype="float") + P2 = numpy.asarray(bin_output_netcdf.variables["P2"][:, :, :], dtype="float") + PE = numpy.asarray(bin_output_netcdf.variables["PE"][:, :, :], dtype="float") + PRECIP_THRESHOLD = bin_output_netcdf.getncattr("PRECIP_THRESHOLD") + if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1): + temp_bin_center = numpy.asarray(bin_output_netcdf.variables[TAVE_VAR][:], dtype="float") + Q0 = numpy.asarray(bin_output_netcdf.variables["Q0"][:, :], dtype="float") + Q1 = numpy.asarray(bin_output_netcdf.variables["Q1"][:, :], dtype="float") + elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2): + temp_bin_center = numpy.asarray(bin_output_netcdf.variables[QSAT_INT_VAR][:], dtype="float") + Q0 = [] + Q1 = [] + CWV_BIN_WIDTH = cwv_bin_center[1] - cwv_bin_center[0] bin_output_netcdf.close() - - elif bin_output_filename.split('.')[-1]=='mat': - matfile=scipy.io.loadmat(bin_output_filename) - - cwv_bin_center=matfile['cwv'] - P0=matfile['P0'].astype(float) - P1=matfile['P1'] - P2=matfile['P2'] - PE=matfile['PE'].astype(float) - PRECIP_THRESHOLD=matfile['PRECIP_THRESHOLD'][0,0] - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - temp_bin_center=matfile[TAVE_VAR] - Q0=matfile['Q0'].astype(float) - Q1=matfile['Q1'] - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - temp_bin_center=matfile[QSAT_INT_VAR] - Q0=[] - Q1=[] - CWV_BIN_WIDTH=cwv_bin_center[1][0]-cwv_bin_center[0][0] - + + elif bin_output_filename.split('.')[-1] == 'mat': + matfile = scipy.io.loadmat(bin_output_filename) + + cwv_bin_center = matfile['cwv'] + P0 = matfile['P0'].astype(float) + P1 = matfile['P1'] + P2 = matfile['P2'] + PE = matfile['PE'].astype(float) + PRECIP_THRESHOLD = matfile['PRECIP_THRESHOLD'][0, 0] + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + temp_bin_center = matfile[TAVE_VAR] + Q0 = matfile['Q0'].astype(float) + Q1 = matfile['Q1'] + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + temp_bin_center = matfile[QSAT_INT_VAR] + Q0 = [] + Q1 = [] + CWV_BIN_WIDTH = cwv_bin_center[1][0] - cwv_bin_center[0][0] + # Return CWV_BIN_WIDTH & PRECIP_THRESHOLD to make sure that # user-specified parameters are consistent with existing data - return cwv_bin_center,temp_bin_center,P0,P1,P2,PE,Q0,Q1,CWV_BIN_WIDTH,PRECIP_THRESHOLD + return cwv_bin_center, temp_bin_center, P0, P1, P2, PE, Q0, Q1, CWV_BIN_WIDTH, PRECIP_THRESHOLD - else: # If the binned model/obs data does not exist (in practice, for obs data only) + else: # If the binned model/obs data does not exist (in practice, for obs data only) return (numpy.array([]), numpy.array([]), numpy.array([]), numpy.array([]), numpy.array([]), numpy.array([]), @@ -656,27 +669,27 @@ def convecTransBasic_loadAnalyzedData(*argsv): numpy.array([]), numpy.array([]) ) + # ====================================================================== # convecTransBasic_plot # takes output from convecTransBasic_loadAnalyzedData and saves the figure as a ps file -def 
convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): - +def convecTransBasic_plot(ret, argsv1, argsv2, *argsv3): print("Plotting...") # Load binned model data with parameters # CBW:CWV_BIN_WIDTH, PT:PRECIP_THRESHOLD - cwv_bin_center,\ - temp_bin_center,\ - P0,\ - P1,\ - P2,\ - PE,\ - Q0,\ - Q1,\ - CBW,\ - PT=ret - + cwv_bin_center, \ + temp_bin_center, \ + P0, \ + P1, \ + P2, \ + PE, \ + Q0, \ + Q1, \ + CBW, \ + PT = ret + # Load plotting parameters from convecTransBasic_usp_plot.py fig_params = argsv1 @@ -685,36 +698,36 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): # Load parameters from convecTransBasic_usp_calc.py # Checking CWV_BIN_WIDTH & PRECIP_THRESHOLD # against CBW & PT guarantees the detected binned result # is consistent with parameters defined in # convecTransBasic_usp_calc.py - CWV_BIN_WIDTH,\ - PDF_THRESHOLD,\ - CWV_RANGE_THRESHOLD,\ - CP_THRESHOLD,\ - MODEL,\ - REGION_STR,\ - NUMBER_OF_REGIONS,\ - BULK_TROPOSPHERIC_TEMPERATURE_MEASURE,\ - PRECIP_THRESHOLD,\ - FIG_OUTPUT_DIR,\ - FIG_OUTPUT_FILENAME,\ - OBS,\ - RES,\ - REGION_STR_OBS,\ - FIG_OBS_DIR,\ - FIG_OBS_FILENAME,\ - USE_SAME_COLOR_MAP,\ - OVERLAY_OBS_ON_TOP_OF_MODEL_FIG=argsv3[0] + CWV_BIN_WIDTH, \ + PDF_THRESHOLD, \ + CWV_RANGE_THRESHOLD, \ + CP_THRESHOLD, \ + MODEL, \ + REGION_STR, \ + NUMBER_OF_REGIONS, \ + BULK_TROPOSPHERIC_TEMPERATURE_MEASURE, \ + PRECIP_THRESHOLD, \ + FIG_OUTPUT_DIR, \ + FIG_OUTPUT_FILENAME, \ + OBS, \ + RES, \ + REGION_STR_OBS, \ + FIG_OBS_DIR, \ + FIG_OBS_FILENAME, \ + USE_SAME_COLOR_MAP, \ + OVERLAY_OBS_ON_TOP_OF_MODEL_FIG = argsv3[0] # Load binned OBS data (default: R2TMIv7) - cwv_bin_center_obs,\ - temp_bin_center_obs,\ - P0_obs,\ - P1_obs,\ - P2_obs,\ - PE_obs,\ - Q0_obs,\ - Q1_obs,\ - CWV_BIN_WIDTH_obs,\ - PT_obs=convecTransBasic_loadAnalyzedData(argsv2) + cwv_bin_center_obs, \ + temp_bin_center_obs, \ + P0_obs, \ + P1_obs, \ + P2_obs, \ + PE_obs, \ + Q0_obs, \ + Q1_obs, \ + CWV_BIN_WIDTH_obs, \ + PT_obs = convecTransBasic_loadAnalyzedData(argsv2) # Check whether the detected binned MODEL data is consistent with User-Specified Parameters # (Not all parameters, just 3) @@ -738,19 +751,19 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): # if the binned OBS data exists, checking by P0_obs==[] if P0_obs.size != 0: # Post-binning Processing before Plotting - P0_obs[P0_obs==0.0]=numpy.nan - P_obs=P1_obs/P0_obs - CP_obs=PE_obs/P0_obs - PDF_obs=numpy.zeros(P0_obs.shape) + P0_obs[P0_obs == 0.0] = numpy.nan + P_obs = P1_obs / P0_obs + CP_obs = PE_obs / P0_obs + PDF_obs = numpy.zeros(P0_obs.shape) for reg in numpy.arange(P0_obs.shape[0]): - PDF_obs[reg,:,:]=P0_obs[reg,:,:]/numpy.nansum(P0_obs[reg,:,:])/CWV_BIN_WIDTH_obs + PDF_obs[reg, :, :] = P0_obs[reg, :, :] / numpy.nansum(P0_obs[reg, :, :]) / CWV_BIN_WIDTH_obs # Bins with PDF>PDF_THRESHOLD - pdf_gt_th_obs=numpy.zeros(PDF_obs.shape) + pdf_gt_th_obs = numpy.zeros(PDF_obs.shape) with numpy.errstate(invalid="ignore"): - pdf_gt_th_obs[PDF_obs>PDF_THRESHOLD]=1 + pdf_gt_th_obs[PDF_obs > PDF_THRESHOLD] = 1 # Indicator of (temp,reg) with wide CWV range - t_reg_I_obs=(numpy.squeeze(numpy.sum(pdf_gt_th_obs,axis=1))*CWV_BIN_WIDTH_obs>CWV_RANGE_THRESHOLD) + t_reg_I_obs = (numpy.squeeze(numpy.sum(pdf_gt_th_obs, axis=1)) * CWV_BIN_WIDTH_obs > CWV_RANGE_THRESHOLD) ### Connected Component Section # The CWV_RANGE_THRESHOLD-Criterion must be satisfied by a connected component @@ -763,99 +776,100 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): for Tidx in numpy.arange(P0_obs.shape[2]): if t_reg_I_obs[reg, Tidx]: dg = networkx.DiGraph() - for cwv_idx in numpy.arange(pdf_gt_th_obs.shape[1]-1): - if pdf_gt_th_obs[reg, cwv_idx,Tidx] > 0 and
pdf_gt_th_obs[reg, cwv_idx+1, Tidx] > 0: - networkx.add_path(dg, [cwv_idx, cwv_idx+1]) + for cwv_idx in numpy.arange(pdf_gt_th_obs.shape[1] - 1): + if pdf_gt_th_obs[reg, cwv_idx, Tidx] > 0 and pdf_gt_th_obs[reg, cwv_idx + 1, Tidx] > 0: + networkx.add_path(dg, [cwv_idx, cwv_idx + 1]) largest = max((dg.subgraph(c) for c in networkx.weakly_connected_components(dg)), key=len) bcc = largest.nodes() # Biggest Connected Component - if sum(pdf_gt_th_obs[reg, bcc, Tidx])*CWV_BIN_WIDTH_obs > CWV_RANGE_THRESHOLD: + if sum(pdf_gt_th_obs[reg, bcc, Tidx]) * CWV_BIN_WIDTH_obs > CWV_RANGE_THRESHOLD: t_reg_I_obs[reg, Tidx] = True #pdf_gt_th_obs[reg,:,Tidx]=0 #pdf_gt_th_obs[reg,bcc,Tidx]=1 else: - t_reg_I_obs[reg, Tidx]=False + t_reg_I_obs[reg, Tidx] = False #pdf_gt_th_obs[reg,:,Tidx]=0 ### End of Connected Component Section # Copy P1, CP into p1, cp for (temp,reg) with "wide CWV range" & "large PDF" - p1_obs=numpy.zeros(P1_obs.shape) - cp_obs=numpy.zeros(CP_obs.shape) + p1_obs = numpy.zeros(P1_obs.shape) + cp_obs = numpy.zeros(CP_obs.shape) for reg in numpy.arange(P1_obs.shape[0]): for Tidx in numpy.arange(P1_obs.shape[2]): - if t_reg_I_obs[reg,Tidx]: - p1_obs[reg,:,Tidx]=numpy.copy(P_obs[reg,:,Tidx]) - cp_obs[reg,:,Tidx]=numpy.copy(CP_obs[reg,:,Tidx]) - p1_obs[pdf_gt_th_obs==0]=numpy.nan - cp_obs[pdf_gt_th_obs==0]=numpy.nan - pdf_obs=numpy.copy(PDF_obs) + if t_reg_I_obs[reg, Tidx]: + p1_obs[reg, :, Tidx] = numpy.copy(P_obs[reg, :, Tidx]) + cp_obs[reg, :, Tidx] = numpy.copy(CP_obs[reg, :, Tidx]) + p1_obs[pdf_gt_th_obs == 0] = numpy.nan + cp_obs[pdf_gt_th_obs == 0] = numpy.nan + pdf_obs = numpy.copy(PDF_obs) for reg in numpy.arange(P1_obs.shape[0]): for Tidx in numpy.arange(P1_obs.shape[2]): - if (t_reg_I_obs[reg,Tidx] and cp_obs[reg,:,Tidx][cp_obs[reg,:,Tidx]>=0.0].size>0): - if (numpy.max(cp_obs[reg,:,Tidx][cp_obs[reg,:,Tidx]>=0])<CP_THRESHOLD): - t_reg_I_obs[reg,Tidx]=False + if (t_reg_I_obs[reg, Tidx] and cp_obs[reg, :, Tidx][cp_obs[reg, :, Tidx] >= 0.0].size > 0): + if (numpy.max(cp_obs[reg, :, Tidx][cp_obs[reg, :, Tidx] >= 0]) < CP_THRESHOLD): + t_reg_I_obs[reg, Tidx] = False else: - t_reg_I_obs[reg,Tidx]=False - + t_reg_I_obs[reg, Tidx] = False + for reg in numpy.arange(P1_obs.shape[0]): for Tidx in numpy.arange(P1_obs.shape[2]): - if (~t_reg_I_obs[reg,Tidx]): - p1_obs[reg,:,Tidx]=numpy.nan - cp_obs[reg,:,Tidx]=numpy.nan - pdf_obs[reg,:,Tidx]=numpy.nan - pdf_pe_obs=pdf_obs*cp_obs + if (~t_reg_I_obs[reg, Tidx]): + p1_obs[reg, :, Tidx] = numpy.nan + cp_obs[reg, :, Tidx] = numpy.nan + pdf_obs[reg, :, Tidx] = numpy.nan + pdf_pe_obs = pdf_obs * cp_obs # Temperature range for plotting - TEMP_MIN_obs=numpy.where(numpy.sum(t_reg_I_obs,axis=0)>=1)[0][0] - TEMP_MAX_obs=numpy.where(numpy.sum(t_reg_I_obs,axis=0)>=1)[0][-1] + TEMP_MIN_obs = numpy.where(numpy.sum(t_reg_I_obs, axis=0) >= 1)[0][0] + TEMP_MAX_obs = numpy.where(numpy.sum(t_reg_I_obs, axis=0) >= 1)[0][-1] # ====================================================================== # ======================Start Plot OBS Binned Data====================== # ====================================================================== - NoC=TEMP_MAX_obs-TEMP_MIN_obs+1 # Number of Colors - scatter_colors = cm.jet(numpy.linspace(0,1,NoC,endpoint=True)) + NoC = TEMP_MAX_obs - TEMP_MIN_obs + 1 # Number of Colors + scatter_colors = cm.jet(numpy.linspace(0, 1, NoC, endpoint=True)) - axes_fontsize,legend_fonsize,marker_size,xtick_pad,figsize1,figsize2 = fig_params['f0'] + axes_fontsize, legend_fonsize, marker_size, xtick_pad, figsize1, figsize2 = fig_params['f0'] print(" Plotting OBS Figure..."), # create figure canvas fig_obs = mp.figure(figsize=(figsize1, figsize2)) -
fig_obs.suptitle('Convective Transition Basic Statistics ('+OBS+', '+RES+'$^{\circ}$)', + fig_obs.suptitle('Convective Transition Basic Statistics (' + OBS + ', ' + RES + '$^{\circ}$)', y=1.04, fontsize=16) #Change y=1.04 to 1.02 for Python3. for reg in numpy.arange(NUMBER_OF_REGIONS): # create figure 1 - ax1 = fig_obs.add_subplot(NUMBER_OF_REGIONS,4,1+reg*NUMBER_OF_REGIONS) + ax1 = fig_obs.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg * NUMBER_OF_REGIONS) ax1.set_xlim(fig_params['f1'][0]) ax1.set_ylim(fig_params['f1'][1]) ax1.set_xticks(fig_params['f1'][4]) ax1.set_yticks(fig_params['f1'][5]) ax1.tick_params(labelsize=axes_fontsize) ax1.tick_params(axis="x", pad=10) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1): - ax1.scatter(cwv_bin_center_obs,p1_obs[reg,:,Tidx],\ - edgecolor="none",facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:],\ - s=marker_size,clip_on=True,zorder=3,\ + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1): + ax1.scatter(cwv_bin_center_obs, p1_obs[reg, :, Tidx], \ + edgecolor="none", facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], \ + s=marker_size, clip_on=True, zorder=3, \ label="{:.0f}".format(temp_bin_center_obs[Tidx])) - elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2): - ax1.scatter(cwv_bin_center_obs,p1_obs[reg,:,Tidx],\ - edgecolor="none",facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:],\ - s=marker_size,clip_on=True,zorder=3,\ + elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2): + ax1.scatter(cwv_bin_center_obs, p1_obs[reg, :, Tidx], \ + edgecolor="none", facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], \ + s=marker_size, clip_on=True, zorder=3, \ label="{:.1f}".format(temp_bin_center_obs[Tidx])) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1): - ax1.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2,facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size,clip_on=True,zorder=3,marker="^", + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1): + ax1.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3, marker="^", label=': $\widehat{q_{sat}}$ (Column-integrated Saturation Specific Humidity)') - elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2): - ax1.scatter(temp_bin_center_obs[Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2, - facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size,clip_on=True,zorder=3,marker="^", + elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2): + ax1.scatter(temp_bin_center_obs[Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3, marker="^", label=': $\widehat{q_{sat}}$ (Column-integrated Saturation Specific Humidity)') ax1.set_xlabel(fig_params['f1'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f1'][3], fontsize=axes_fontsize) @@ -863,19 +877,19 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ax1.set_axisbelow(True) handles, labels = ax1.get_legend_handles_labels() - 
num_handles = sum(t_reg_I_obs[reg,:]) + num_handles = sum(t_reg_I_obs[reg, :]) leg = ax1.legend(handles[0:num_handles], labels[0:num_handles], fontsize=axes_fontsize, bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.1, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax1.add_artist(leg) - if reg==0: + if reg == 0: ax1.text(s='Precip. cond. avg. on CWV', x=0.5, y=1.05, transform=ax1.transAxes, fontsize=12, ha='center', va='bottom') # create figure 2 (probability pickup) - ax2 = fig_obs.add_subplot(NUMBER_OF_REGIONS,4,2+reg*NUMBER_OF_REGIONS) + ax2 = fig_obs.add_subplot(NUMBER_OF_REGIONS, 4, 2 + reg * NUMBER_OF_REGIONS) ax2.set_xlim(fig_params['f2'][0]) ax2.set_ylim(fig_params['f2'][1]) ax2.set_xticks(fig_params['f2'][4]) @@ -883,21 +897,21 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=xtick_pad) for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): - if t_reg_I_obs[reg,Tidx]: - ax2.scatter(cwv_bin_center_obs,cp_obs[reg, :, Tidx], - edgecolor="none",facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size, clip_on=True,zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + ax2.scatter(cwv_bin_center_obs, cp_obs[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): if t_reg_I_obs[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax2.scatter(Q1_obs[reg, Tidx]/Q0_obs[reg, Tidx], fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2, - facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + ax2.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax2.scatter(temp_bin_center_obs[Tidx],fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2, - facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], + ax2.scatter(temp_bin_center_obs[Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], s=marker_size, clip_on=True, zorder=3, marker="^") ax2.set_xlabel(fig_params['f2'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f2'][3], fontsize=axes_fontsize) @@ -905,12 +919,12 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): verticalalignment="top") ax2.grid() ax2.set_axisbelow(True) - if reg==0: + if reg == 0: ax2.text(s='Prob. 
of Precip.>' + str(PT_obs) + 'mm/h', x=0.5, y=1.05, transform=ax2.transAxes, fontsize=12, ha='center', va='bottom') # create figure 3 (normalized PDF) - ax3 = fig_obs.add_subplot(NUMBER_OF_REGIONS,4,3+reg*NUMBER_OF_REGIONS) + ax3 = fig_obs.add_subplot(NUMBER_OF_REGIONS, 4, 3 + reg * NUMBER_OF_REGIONS) ax3.set_yscale("log") ax3.set_xlim(fig_params['f3'][0]) ax3.set_ylim(fig_params['f3'][1]) @@ -919,79 +933,81 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ax3.tick_params(axis="x", pad=xtick_pad) for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): if t_reg_I_obs[reg, Tidx]: - ax3.scatter(cwv_bin_center_obs,PDF_obs[reg, :, Tidx], - edgecolor="none",facecolor=scatter_colors[Tidx-TEMP_MIN_obs, :], + ax3.scatter(cwv_bin_center_obs, PDF_obs[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], s=marker_size, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax3.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx],fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2, - facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + ax3.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax3.scatter(temp_bin_center_obs[Tidx],fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2, - facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + ax3.scatter(temp_bin_center_obs[Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], + s=marker_size, clip_on=True, zorder=3, marker="^") ax3.set_xlabel(fig_params['f3'][2], fontsize=axes_fontsize) ax3.set_ylabel(fig_params['f3'][3], fontsize=axes_fontsize) ax3.grid() ax3.set_axisbelow(True) - if reg==0: + if reg == 0: ax3.text(s='PDF of CWV', x=0.5, y=1.05, transform=ax3.transAxes, fontsize=12, ha='center', va='bottom') # create figure 4 (normalized PDF - precipitation) - ax4 = fig_obs.add_subplot(NUMBER_OF_REGIONS,4,4+reg*NUMBER_OF_REGIONS) + ax4 = fig_obs.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg * NUMBER_OF_REGIONS) ax4.set_yscale("log") ax4.set_xlim(fig_params['f4'][0]) ax4.set_ylim(fig_params['f4'][1]) ax4.set_xticks(fig_params['f4'][4]) ax4.tick_params(labelsize=axes_fontsize) ax4.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - ax4.scatter(cwv_bin_center_obs,pdf_pe_obs[reg,:,Tidx],\ - edgecolor="none",facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:],\ - s=marker_size,clip_on=True,zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1): - ax4.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx],fig_params['f4'][1][1]*0.83,\ - edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2,facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:],\ - s=marker_size,clip_on=True,zorder=3,marker="^") - elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2): - ax4.scatter(temp_bin_center_obs[Tidx],fig_params['f4'][1][1]*0.83,\ - 
edgecolor=scatter_colors[Tidx-TEMP_MIN_obs,:]/2,facecolor=scatter_colors[Tidx-TEMP_MIN_obs,:],\ - s=marker_size,clip_on=True,zorder=3,marker="^") + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + ax4.scatter(cwv_bin_center_obs, pdf_pe_obs[reg, :, Tidx], \ + edgecolor="none", facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], \ + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1): + ax4.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx], fig_params['f4'][1][1] * 0.83, \ + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], \ + s=marker_size, clip_on=True, zorder=3, marker="^") + elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2): + ax4.scatter(temp_bin_center_obs[Tidx], fig_params['f4'][1][1] * 0.83, \ + edgecolor=scatter_colors[Tidx - TEMP_MIN_obs, :] / 2, + facecolor=scatter_colors[Tidx - TEMP_MIN_obs, :], \ + s=marker_size, clip_on=True, zorder=3, marker="^") ax4.set_xlabel(fig_params['f4'][2], fontsize=axes_fontsize) ax4.set_ylabel(fig_params['f4'][3], fontsize=axes_fontsize) - ax4.text(0.05, 0.95, "Precip > "+str(PT_obs)+" mm h$^-$$^1$", + ax4.text(0.05, 0.95, "Precip > " + str(PT_obs) + " mm h$^-$$^1$", transform=ax4.transAxes, fontsize=12, verticalalignment="top") ax4.grid() ax4.set_axisbelow(True) if reg == 0: - ax4.text(s='PDF of CWV for Precip.>'+str(PT_obs)+'mm/h', x=0.49, y=1.05, + ax4.text(s='PDF of CWV for Precip.>' + str(PT_obs) + 'mm/h', x=0.49, y=1.05, transform=ax4.transAxes, fontsize=12, ha='center', va='bottom') if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - temp_str = '$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)'\ + temp_str = '$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)' \ ' used as the bulk tropospheric temperature measure' elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - temp_str = '$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)'\ + temp_str = '$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ 'used as the bulk tropospheric temperature measure' fig_obs.text(s=temp_str, x=0, y=0, ha='left', va='top', transform=fig_obs.transFigure, fontsize=12) - triag_qsat_str = '$\Delta$: $\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity;'\ - ' Units: mm)' + triag_qsat_str = '$\Delta$: $\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity;' \ + ' Units: mm)' fig_obs.text(s=triag_qsat_str, x=0, y=-0.02, ha='left', va='top', transform=fig_obs.transFigure, fontsize=12) - + # set layout to tight (so that space between figures is minimized) fig_obs.tight_layout() - fig_obs.savefig(FIG_OBS_DIR+"/"+FIG_OBS_FILENAME, bbox_inches="tight") - + fig_obs.savefig(FIG_OBS_DIR + "/" + FIG_OBS_FILENAME, bbox_inches="tight") + print("...Completed!") - print(" OBS Figure saved as "+FIG_OBS_DIR+"/"+FIG_OBS_FILENAME+"!") + print(" OBS Figure saved as " + FIG_OBS_DIR + "/" + FIG_OBS_FILENAME + "!") # ====================================================================== # =======================End Plot OBS Binned Data======================= # ====================================================================== @@ -999,18 +1015,18 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): # Post-binning Processing before Plotting P0[P0 == 0.0] = numpy.nan - P=P1/P0 - CP=PE/P0 - PDF=numpy.zeros(P0.shape) + P = P1 / P0 + 
CP = PE / P0 + PDF = numpy.zeros(P0.shape) for reg in numpy.arange(P0.shape[0]): - PDF[reg,:,:]=P0[reg,:,:]/numpy.nansum(P0[reg,:,:])/CBW + PDF[reg, :, :] = P0[reg, :, :] / numpy.nansum(P0[reg, :, :]) / CBW # Bins with PDF>PDF_THRESHOLD - pdf_gt_th=numpy.zeros(PDF.shape) + pdf_gt_th = numpy.zeros(PDF.shape) with numpy.errstate(invalid="ignore"): - pdf_gt_th[PDF>PDF_THRESHOLD]=1 + pdf_gt_th[PDF > PDF_THRESHOLD] = 1 # Indicator of (temp,reg) with wide CWV range - t_reg_I = (numpy.squeeze(numpy.sum(pdf_gt_th,axis=1))*CBW>CWV_RANGE_THRESHOLD) + t_reg_I = (numpy.squeeze(numpy.sum(pdf_gt_th, axis=1)) * CBW > CWV_RANGE_THRESHOLD) ### Connected Component Section # The CWV_RANGE_THRESHOLD-Criterion must be satisfied by a connected component @@ -1019,67 +1035,67 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): # But when models behave "funny" one may miss by turning on this section # For fitting procedure (finding critical CWV at which the precip picks up) # Default: on -# for reg in numpy.arange(P0.shape[0]): -# for Tidx in numpy.arange(P0.shape[2]): -# if t_reg_I[reg,Tidx]: -# G=networkx.DiGraph() -# for cwv_idx in numpy.arange(pdf_gt_th.shape[1]-1): -# if (pdf_gt_th[reg,cwv_idx,Tidx]>0 and pdf_gt_th[reg,cwv_idx+1,Tidx]>0): -# G.add_path([cwv_idx,cwv_idx+1]) -# largest = max(networkx.weakly_connected_component_subgraphs(G),key=len) -# bcc=largest.nodes() # Biggest Connected Component -# if (sum(pdf_gt_th[reg,bcc,Tidx])*CBW>CWV_RANGE_THRESHOLD): -# t_reg_I[reg,Tidx]=True -# #pdf_gt_th[reg,:,Tidx]=0 -# #pdf_gt_th[reg,bcc,Tidx]=1 -# else: -# t_reg_I[reg,Tidx]=False -# #pdf_gt_th[reg,:,Tidx]=0 -# End of Connected Component Section + # for reg in numpy.arange(P0.shape[0]): + # for Tidx in numpy.arange(P0.shape[2]): + # if t_reg_I[reg,Tidx]: + # G=networkx.DiGraph() + # for cwv_idx in numpy.arange(pdf_gt_th.shape[1]-1): + # if (pdf_gt_th[reg,cwv_idx,Tidx]>0 and pdf_gt_th[reg,cwv_idx+1,Tidx]>0): + # G.add_path([cwv_idx,cwv_idx+1]) + # largest = max(networkx.weakly_connected_component_subgraphs(G),key=len) + # bcc=largest.nodes() # Biggest Connected Component + # if (sum(pdf_gt_th[reg,bcc,Tidx])*CBW>CWV_RANGE_THRESHOLD): + # t_reg_I[reg,Tidx]=True + # #pdf_gt_th[reg,:,Tidx]=0 + # #pdf_gt_th[reg,bcc,Tidx]=1 + # else: + # t_reg_I[reg,Tidx]=False + # #pdf_gt_th[reg,:,Tidx]=0 + # End of Connected Component Section # Copy P1, CP into p1, cp for (temp,reg) with "wide CWV range" & "large PDF" - p1=numpy.zeros(P1.shape) - cp=numpy.zeros(CP.shape) + p1 = numpy.zeros(P1.shape) + cp = numpy.zeros(CP.shape) for reg in numpy.arange(P1.shape[0]): for Tidx in numpy.arange(P1.shape[2]): if t_reg_I[reg, Tidx]: - p1[reg, :,Tidx]=numpy.copy(P[reg, :, Tidx]) - cp[reg, :, Tidx]=numpy.copy(CP[reg, :, Tidx]) + p1[reg, :, Tidx] = numpy.copy(P[reg, :, Tidx]) + cp[reg, :, Tidx] = numpy.copy(CP[reg, :, Tidx]) p1[pdf_gt_th == 0] = numpy.nan cp[pdf_gt_th == 0] = numpy.nan - pdf=numpy.copy(PDF) + pdf = numpy.copy(PDF) for reg in numpy.arange(P1.shape[0]): for Tidx in numpy.arange(P1.shape[2]): - if (t_reg_I[reg,Tidx] and cp[reg,:,Tidx][cp[reg,:,Tidx]>=0.0].size>0): - if (numpy.max(cp[reg,:,Tidx][cp[reg,:,Tidx]>=0])<CP_THRESHOLD): - t_reg_I[reg,Tidx]=False + if (t_reg_I[reg, Tidx] and cp[reg, :, Tidx][cp[reg, :, Tidx] >= 0.0].size > 0): + if (numpy.max(cp[reg, :, Tidx][cp[reg, :, Tidx] >= 0]) < CP_THRESHOLD): + t_reg_I[reg, Tidx] = False else: t_reg_I[reg, Tidx] = False - + for reg in numpy.arange(P1.shape[0]): for Tidx in numpy.arange(P1.shape[2]): if ~t_reg_I[reg, Tidx]: p1[reg, :, Tidx] = numpy.nan cp[reg, :, Tidx] = numpy.nan pdf[reg, :, Tidx] = numpy.nan - pdf_pe = pdf*cp + pdf_pe = pdf * cp # Temperature range for plotting
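In both the OBS block above and this MODEL block, t_reg_I is a boolean (region x temperature-bin) indicator left True only where the PDF, CWV-range, and conditional-probability criteria all passed, and the plotted temperature range spans the first through the last bin flagged in any region, via the numpy.where(...)[0][0] / [0][-1] idiom in the lines that follow. A small self-contained illustration with a made-up indicator array (shapes chosen just for the example):

import numpy

# Hypothetical indicator: 4 regions x 10 temperature bins, True where a
# (region, temperature) pair survived the screening criteria.
t_reg_I = numpy.zeros((4, 10), dtype=bool)
t_reg_I[0, 3:7] = True
t_reg_I[2, 5:9] = True

# A temperature bin enters the plot if at least one region flags it; the
# plotted range runs from the first to the last such bin.
flagged = numpy.where(numpy.sum(t_reg_I, axis=0) >= 1)[0]
TEMP_MIN, TEMP_MAX = flagged[0], flagged[-1]
print(TEMP_MIN, TEMP_MAX)  # -> 3 8

Note that if no bin qualifies, flagged is empty and the [0] indexing raises IndexError; the code implicitly assumes at least one (region, temperature) pair passes.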
TEMP_MIN = numpy.where(numpy.sum(t_reg_I, axis=0) >= 1)[0][0] TEMP_MAX = numpy.where(numpy.sum(t_reg_I, axis=0) >= 1)[0][-1] # Use OBS to set colormap (but if they don't exist or users don't want to...) if P0_obs.size == 0 or not USE_SAME_COLOR_MAP: - TEMP_MIN_obs=TEMP_MIN - TEMP_MAX_obs=TEMP_MAX + TEMP_MIN_obs = TEMP_MIN + TEMP_MAX_obs = TEMP_MAX # ====================================================================== # =====================Start Plot MODEL Binned Data===================== # ====================================================================== - NoC=TEMP_MAX_obs-TEMP_MIN_obs+1 # Number of Colors - scatter_colors = cm.jet(numpy.linspace(0,1,NoC,endpoint=True)) + NoC = TEMP_MAX_obs - TEMP_MIN_obs + 1 # Number of Colors + scatter_colors = cm.jet(numpy.linspace(0, 1, NoC, endpoint=True)) - axes_fontsize,legend_fonsize,marker_size,xtick_pad,figsize1,figsize2 = fig_params['f0'] + axes_fontsize, legend_fonsize, marker_size, xtick_pad, figsize1, figsize2 = fig_params['f0'] print("Plotting MODEL Figure..."), @@ -1089,45 +1105,45 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): for reg in numpy.arange(NUMBER_OF_REGIONS): # create figure 1 - ax1 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg*NUMBER_OF_REGIONS) + ax1 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg * NUMBER_OF_REGIONS) ax1.set_xlim(fig_params['f1'][0]) ax1.set_ylim(fig_params['f1'][1]) ax1.set_xticks(fig_params['f1'][4]) ax1.set_yticks(fig_params['f1'][5]) ax1.tick_params(labelsize=axes_fontsize) ax1.tick_params(axis="x", pad=10) - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): - if t_reg_I[reg,Tidx]: + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax1.scatter(cwv_bin_center,p1[reg, :, Tidx], - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax1.scatter(cwv_bin_center, p1[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, label="{:.0f}".format(temp_bin_center[Tidx])) elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: ax1.scatter(cwv_bin_center, p1[reg, :, Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, label="{:.1f}".format(temp_bin_center[Tidx])) - for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN), max(TEMP_MAX_obs+1, TEMP_MAX+1)): + for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN), max(TEMP_MAX_obs + 1, TEMP_MAX + 1)): if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and P0_obs.size != 0 and t_reg_I_obs[reg, Tidx]: - ax1.scatter(cwv_bin_center_obs,p1_obs[reg,:,Tidx], - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size/5, clip_on=True, zorder=3, - label='Statistics for ' + OBS + ' (spatial resolution: ' + RES+ '$^{\circ}$)') - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): + ax1.scatter(cwv_bin_center_obs, p1_obs[reg, :, Tidx], + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size / 5, clip_on=True, zorder=3, + label='Statistics for ' + OBS + ' (spatial resolution: ' + RES + '$^{\circ}$)') + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax1.scatter(Q1[reg,Tidx]/Q0[reg, Tidx], fig_params['f1'][1][1]*0.98, - 
edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=4,marker="^", + ax1.scatter(Q1[reg, Tidx] / Q0[reg, Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=4, marker="^", label=': $\widehat{q_{sat}}$ (Column-integrated Saturation Specific Humidity)') - elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2): - ax1.scatter(temp_bin_center[Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True, zorder=4, marker="^", + elif (BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2): + ax1.scatter(temp_bin_center[Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=4, marker="^", label=': $\widehat{q_{sat}}$ (Column-integrated Saturation Specific Humidity)') ax1.set_xlabel(fig_params['f1'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f1'][3], fontsize=axes_fontsize) @@ -1135,48 +1151,48 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ax1.set_axisbelow(True) handles, labels = ax1.get_legend_handles_labels() - num_handles = sum(t_reg_I[reg,:]) + num_handles = sum(t_reg_I[reg, :]) leg = ax1.legend(handles[0:num_handles], labels[0:num_handles], fontsize=axes_fontsize, - bbox_to_anchor=(0.05,0.95), + bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.1, - fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax1.add_artist(leg) - if reg==0: + if reg == 0: ax1.text(s='Precip. cond. avg. 
on CWV', x=0.5, y=1.05, transform=ax1.transAxes, fontsize=12, ha='center', va='bottom') # create figure 2 (probability pickup) - ax2 = fig.add_subplot(NUMBER_OF_REGIONS,4,2+reg*NUMBER_OF_REGIONS) + ax2 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 2 + reg * NUMBER_OF_REGIONS) ax2.set_xlim(fig_params['f2'][0]) ax2.set_ylim(fig_params['f2'][1]) ax2.set_xticks(fig_params['f2'][4]) ax2.set_yticks(fig_params['f2'][5]) ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: - ax2.scatter(cwv_bin_center,cp[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=3) - for Tidx in numpy.arange(min(TEMP_MIN_obs,TEMP_MIN), max(TEMP_MAX_obs+1,TEMP_MAX+1)): + ax2.scatter(cwv_bin_center, cp[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN), max(TEMP_MAX_obs + 1, TEMP_MAX + 1)): if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and P0_obs.size != 0 and t_reg_I_obs[reg, Tidx]: - ax2.scatter(cwv_bin_center_obs, cp_obs[reg,:,Tidx], - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size/5, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): + ax2.scatter(cwv_bin_center_obs, cp_obs[reg, :, Tidx], + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size / 5, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax2.scatter(Q1[reg,Tidx]/Q0[reg,Tidx], fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=4,marker="^") + ax2.scatter(Q1[reg, Tidx] / Q0[reg, Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=4, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax2.scatter(temp_bin_center[Tidx], fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True, zorder=4, marker="^") + ax2.scatter(temp_bin_center[Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=4, marker="^") ax2.set_xlabel(fig_params['f2'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f2'][3], fontsize=axes_fontsize) ax2.text(0.05, 0.95, REGION_STR[reg], transform=ax2.transAxes, fontsize=12, fontweight="bold", @@ -1184,39 +1200,39 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ax2.grid() ax2.set_axisbelow(True) if reg == 0: - ax2_text = ax2.text(s='Prob. of Precip.>'+str(PT)+'mm/h', x=0.5, y=1.05, + ax2_text = ax2.text(s='Prob. 
of Precip.>' + str(PT) + 'mm/h', x=0.5, y=1.05, transform=ax2.transAxes, fontsize=12, ha='center', va='bottom') # create figure 3 (normalized PDF) - ax3 = fig.add_subplot(NUMBER_OF_REGIONS,4,3+reg*NUMBER_OF_REGIONS) + ax3 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 3 + reg * NUMBER_OF_REGIONS) ax3.set_yscale("log") ax3.set_xlim(fig_params['f3'][0]) ax3.set_ylim(fig_params['f3'][1]) ax3.set_xticks(fig_params['f3'][4]) ax3.tick_params(labelsize=axes_fontsize) ax3.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: - ax3.scatter(cwv_bin_center,PDF[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=3) - for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN),max(TEMP_MAX_obs+1, TEMP_MAX+1)): + ax3.scatter(cwv_bin_center, PDF[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN), max(TEMP_MAX_obs + 1, TEMP_MAX + 1)): if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and P0_obs.size != 0 and t_reg_I_obs[reg, Tidx]: - ax3.scatter(cwv_bin_center_obs,PDF_obs[reg,:,Tidx], - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size/5, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: + ax3.scatter(cwv_bin_center_obs, PDF_obs[reg, :, Tidx], + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size / 5, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax3.scatter(Q1[reg, Tidx]/Q0[reg, Tidx], fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=4,marker="^") + ax3.scatter(Q1[reg, Tidx] / Q0[reg, Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=4, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax3.scatter(temp_bin_center[Tidx],fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax3.scatter(temp_bin_center[Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=4, marker="^") ax3.set_xlabel(fig_params['f3'][2], fontsize=axes_fontsize) ax3.set_ylabel(fig_params['f3'][3], fontsize=axes_fontsize) @@ -1227,44 +1243,45 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): ha='center', va='bottom') # create figure 4 (normalized PDF - precipitation) - ax4 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg*NUMBER_OF_REGIONS) + ax4 = fig.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg * NUMBER_OF_REGIONS) ax4.set_yscale("log") ax4.set_xlim(fig_params['f4'][0]) ax4.set_ylim(fig_params['f4'][1]) ax4.set_xticks(fig_params['f4'][4]) ax4.tick_params(labelsize=axes_fontsize) ax4.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if 
t_reg_I[reg, Tidx]: - ax4.scatter(cwv_bin_center,pdf_pe[reg, :, Tidx], - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax4.scatter(cwv_bin_center, pdf_pe[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3) - for Tidx in numpy.arange(min(TEMP_MIN_obs,TEMP_MIN), max(TEMP_MAX_obs+1,TEMP_MAX+1)): + for Tidx in numpy.arange(min(TEMP_MIN_obs, TEMP_MIN), max(TEMP_MAX_obs + 1, TEMP_MAX + 1)): if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and P0_obs.size != 0 and t_reg_I_obs[reg, Tidx]: ax4.scatter(cwv_bin_center_obs, pdf_pe_obs[reg, :, Tidx], - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size/5, clip_on=True, zorder=3) + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size / 5, clip_on=True, zorder=3) for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): - if t_reg_I[reg,Tidx]: + if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax4.scatter(Q1[reg,Tidx]/Q0[reg,Tidx],fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + ax4.scatter(Q1[reg, Tidx] / Q0[reg, Tidx], fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=4, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax4.scatter(temp_bin_center[Tidx], fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax4.scatter(temp_bin_center[Tidx], fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=4, marker="^") ax4.set_xlabel(fig_params['f4'][2], fontsize=axes_fontsize) ax4.set_ylabel(fig_params['f4'][3], fontsize=axes_fontsize) - ax4.text(0.05, 0.95, "Precip > "+str(PT)+" mm h$^-$$^1$", transform=ax4.transAxes, fontsize=12, + ax4.text(0.05, 0.95, "Precip > " + str(PT) + " mm h$^-$$^1$", transform=ax4.transAxes, fontsize=12, verticalalignment="top") ax4.grid() ax4.set_axisbelow(True) if reg == 0: - ax4.text(s='PDF of CWV for Precip.>' + str(PT) + 'mm/h', x=0.49, y=1.05, transform=ax4.transAxes, fontsize=12, + ax4.text(s='PDF of CWV for Precip.>' + str(PT) + 'mm/h', x=0.49, y=1.05, transform=ax4.transAxes, + fontsize=12, ha='center', va='bottom') fig.text(s=temp_str, x=0, y=0, ha='left', va='top', transform=fig.transFigure, fontsize=12) @@ -1277,7 +1294,7 @@ def convecTransBasic_plot(ret,argsv1,argsv2,*argsv3): # set layout to tight (so that space between figures is minimized) fig.tight_layout() fig.savefig(FIG_OUTPUT_DIR + "/" + FIG_OUTPUT_FILENAME, bbox_inches="tight") - + print("...Completed!") print(" Figure saved as " + FIG_OUTPUT_DIR + "/" + FIG_OUTPUT_FILENAME + "!") # ====================================================================== diff --git a/diagnostics/convective_transition_diag/convecTransCriticalCollapse.py b/diagnostics/convective_transition_diag/convecTransCriticalCollapse.py index ce66f74c5..72cf8b695 100644 --- a/diagnostics/convective_transition_diag/convecTransCriticalCollapse.py +++ b/diagnostics/convective_transition_diag/convecTransCriticalCollapse.py @@ -5,14 +5,14 @@ # # Convective Transition 
Critical Collapse # as part of functionality provided by -# Convective Transiton Diagnostic Package (convective_transition_diag_v1r2.py) +# Convective Transition Diagnostic Package (convective_transition_diag_v1r2.py) # # Version 1 revision 3 3-Nov-2017 Yi-Hung Kuo (UCLA) # Contributors: K. A. Schiro (UCLA), B. Langenbrunner (UCLA), F. Ahmed (UCLA), # C. Martinez (UCLA), C.-C. (Jack) Chen (NCAR) # PI: J. David Neelin (UCLA) # -# Computes Citical CWV for Convective Transition Statistics following +# Computes Critical CWV for Convective Transition Statistics following # Kuo et al. (2017a, 2017b), similar to Sahany et al. (2012) # # Generates plots of: @@ -84,6 +84,7 @@ from convecTransCriticalCollapse_util import convecTransCriticalCollapse_loadAnalyzedData from convecTransCriticalCollapse_util import convecTransCriticalCollapse_fitCritical from convecTransCriticalCollapse_util import convecTransCriticalCollapse_plot + print("**************************************************") print("Excuting Convective Transition Critical Collapse (convecTransCriticalCollapse.py)......") print("**************************************************") @@ -95,39 +96,39 @@ print("Load user-specified binning parameters..."), # Create and read user-specified parameters -os.system("python "+os.environ["POD_HOME"]+"/convecTransCriticalCollapse_usp.py") -with open(os.environ["WORK_DIR"]+"/convecTransCriticalCollapse_parameters.json") as outfile: - params_data=json.load(outfile) +os.system("python " + os.environ["POD_HOME"] + "/convecTransCriticalCollapse_usp.py") +with open(os.environ["WORK_DIR"] + "/convecTransCriticalCollapse_parameters.json") as outfile: + params_data = json.load(outfile) print("...Loaded!") # ====================================================================== # Check if binned MODEL data from convecTransBasic.py # exists in wkdir/casename/ from a previous computation -if (len(params_data["bin_output_list"])!=0): # binned MODEL data exists +if len(params_data["bin_output_list"]) != 0: # binned MODEL data exists print("Binned output detected...") - binned_model=convecTransCriticalCollapse_loadAnalyzedData(params_data["args1"]) - binned_obs=convecTransCriticalCollapse_loadAnalyzedData(params_data["args2"]) + binned_model = convecTransCriticalCollapse_loadAnalyzedData(params_data["args1"]) + binned_obs = convecTransCriticalCollapse_loadAnalyzedData(params_data["args2"]) print("Binned output Loaded!") - print("Starting fitting procedure..."), - plot_model=convecTransCriticalCollapse_fitCritical(binned_model,params_data["fit_model_params"]) - plot_obs=convecTransCriticalCollapse_fitCritical(binned_obs,params_data["fit_obs_params"]) + print("Starting fitting procedure..."), + plot_model = convecTransCriticalCollapse_fitCritical(binned_model, params_data["fit_model_params"]) + plot_obs = convecTransCriticalCollapse_fitCritical(binned_obs, params_data["fit_obs_params"]) print("...Fitted!") # ====================================================================== # Plot binning results & save the figure in wkdir/casename/.../ - convecTransCriticalCollapse_plot(binned_model,plot_model,\ - binned_obs,plot_obs,\ - params_data["args3"],params_data["plot_params"]) - print("Plotting Complete!") + convecTransCriticalCollapse_plot(binned_model, plot_model, + binned_obs, plot_obs, + params_data["args3"], params_data["plot_params"]) + print("Plotting Complete!") -else: +else: print("Binned output from convecTransBasic.py does not exists!") - print(" If you are certain that binned output exists, "\ - +"please 
double-check convecTransCriticalCollapse_usp.py, "\ - +"making sure that it is consistent with "\ - +"convecTransBasic_usp_calc.py & convecTransBasic_usp_plot.py!") + print(" If you are certain that binned output exists, " \ + + "please double-check convecTransCriticalCollapse_usp.py, " \ + + "making sure that it is consistent with " \ + + "convecTransBasic_usp_calc.py & convecTransBasic_usp_plot.py!") print("**************************************************") print("Convective Transition Thermodynamic Critical Collapse (convecTransCriticalCollapse.py) Executed!") diff --git a/diagnostics/convective_transition_diag/convecTransCriticalCollapse_usp.py b/diagnostics/convective_transition_diag/convecTransCriticalCollapse_usp.py index 5e28f90ac..dd4d7bc2c 100644 --- a/diagnostics/convective_transition_diag/convecTransCriticalCollapse_usp.py +++ b/diagnostics/convective_transition_diag/convecTransCriticalCollapse_usp.py @@ -17,63 +17,63 @@ # START USER SPECIFIED SECTION # ====================================================================== # Model name (will show up in the MODEL figure) -MODEL=os.environ["CASENAME"] +MODEL = os.environ["CASENAME"] # Spatial resolution for OBS (default: R2+TMIv7) # Default: "0.25" rather than os.environ["RES"] # because the total number of events for OBS is "small" -RES="0.25" +RES = "0.25" # Number of regions # Use grids with 1<=region<=NUMBER_OF_REGIONS in the mask -NUMBER_OF_REGIONS=4 # default: 4 +NUMBER_OF_REGIONS = 4 # default: 4 # Region names -REGION_STR=["WPac","EPac","Atl","Ind"] +REGION_STR = ["WPac", "EPac", "Atl", "Ind"] -TAVE_VAR=os.environ["tave_var"] -QSAT_INT_VAR=os.environ["qsat_int_var"] +TAVE_VAR = os.environ["tave_var"] +QSAT_INT_VAR = os.environ["qsat_int_var"] # Use 1:tave, or 2:qsat_int as Bulk Tropospheric Temperature Measure -BULK_TROPOSPHERIC_TEMPERATURE_MEASURE=int(os.environ["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"]) +BULK_TROPOSPHERIC_TEMPERATURE_MEASURE = int(os.environ["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"]) # Directory & Filename for saving binned results (netCDF4) # tave or qsat_int will be appended to BIN_OUTPUT_FILENAME -BIN_OUTPUT_DIR=os.environ["WORK_DIR"]+"/model/netCDF" -BIN_OUTPUT_FILENAME=os.environ["CASENAME"]+".convecTransBasic" - -if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - TEMP_VAR=TAVE_VAR - TEMP_VAR_STR="$\widehat{T}$" - TEMP_UNITS="(K)" -elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - TEMP_VAR=QSAT_INT_VAR - TEMP_VAR_STR="$\widehat{q_{sat}}$" - TEMP_UNITS="(mm)" -BIN_OUTPUT_FILENAME+="_"+TEMP_VAR +BIN_OUTPUT_DIR = os.environ["WORK_DIR"] + "/model/netCDF" +BIN_OUTPUT_FILENAME = os.environ["CASENAME"] + ".convecTransBasic" + +if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + TEMP_VAR = TAVE_VAR + TEMP_VAR_STR = "${T}$" + TEMP_UNITS = "(K)" +elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + TEMP_VAR = QSAT_INT_VAR + TEMP_VAR_STR = "${q_{sat}}$" + TEMP_UNITS = "(mm)" +BIN_OUTPUT_FILENAME += "_" + TEMP_VAR # List binned data file (with filename corresponding to casename) -bin_output_list=sorted(glob.glob(BIN_OUTPUT_DIR+"/"+BIN_OUTPUT_FILENAME+".nc")) +bin_output_list = sorted(glob.glob(BIN_OUTPUT_DIR + "/" + BIN_OUTPUT_FILENAME + ".nc")) # Directory & Filename for saving figures # convecTransCriticalCollapse.py generates 2 sets figures for MODEL -FIG_OUTPUT_DIR=os.environ["WORK_DIR"] + "/model/PS" +FIG_OUTPUT_DIR = os.environ["WORK_DIR"] + "/model/PS" # Figure filename for Convective Transition Statistics (CTS) # collapsed by shifting CWV by Critical CWV 
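The "collapse" in these filenames refers to replotting the onset statistics against CWV shifted by the critical CWV w_c, which convecTransCriticalCollapse_fitCritical (in the utility diff further down) obtains from a linear fit: numpy.polyfit returns the slope and intercept of precipitation versus CWV measured relative to the reference value w_r, and the critical value follows as wc = wr - intercept/slope (the "wc=wr-(wr-wc)" comment in that diff). A toy reproduction of that algebra on synthetic data; alpha_true, wc_true, and the fit window are hypothetical, and the actual construction of cwvRange and p_mp3t in the utility is more involved:

import numpy

# Synthetic pickup curve: precip rises linearly above a "true" critical CWV.
cwv = numpy.arange(40.0, 70.0, 1.0)                         # mm
alpha_true, wc_true = 0.8, 55.0
precip = numpy.maximum(alpha_true * (cwv - wc_true), 0.0)   # mm/h

# Reference CWV wr: first bin where precip exceeds PRECIP_REF (the utility
# refines this with linear interpolation between adjacent bins).
PRECIP_REF = 2.0
wr = cwv[numpy.nonzero(precip > PRECIP_REF)[0][0]]

# Fit precip against CWV relative to wr over a fitting window, analogous to
# numpy.polyfit(cwvRange[fitRange], p_mp3t[fitRange], 1) in the diff.
fit_mask = (precip > 1.0) & (precip < 8.0)
slope, intercept = numpy.polyfit(cwv[fit_mask] - wr, precip[fit_mask], 1)

# intercept/slope recovers (wr - wc), hence wc = wr - intercept/slope.
wc = wr - intercept / slope
print(round(float(wc), 2))  # -> 55.0, the critical CWV we built in

Collapsing then amounts to plotting precipitation against cwv - wc for each temperature bin, which should make the pickup curves line up.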
-FIG_FILENAME_CTS=os.environ["CASENAME"]+".convecTransCriticalCollapse_stats"+"_"+TEMP_VAR+".eps"
+FIG_FILENAME_CTS = os.environ["CASENAME"] + ".convecTransCriticalCollapse_stats" + "_" + TEMP_VAR + ".eps"
 # Figure filename for Critical CWV
-FIG_FILENAME_WC=os.environ["CASENAME"]+".convecTransCriticalCollapse_wc"+"_"+TEMP_VAR+".eps"
-
-## Binned data filename & figure directory/filename for OBS (default: R2TMIv7) ##
-OBS="Reanalysis-2 + TMIv7r1" # will show up in the OBS figure
-bin_obs_list=sorted(glob.glob(os.environ["OBS_DATA"]\
-                    +"/convecTransBasic_R2TMIv7r1_200206_201405_res="\
-                    +RES+"_fillNrCWV_"\
-                    +TEMP_VAR+".nc"))
+FIG_FILENAME_WC = os.environ["CASENAME"] + ".convecTransCriticalCollapse_wc" + "_" + TEMP_VAR + ".eps"
+
+# Binned data filename & figure directory/filename for OBS (default: R2TMIv7) ##
+OBS = "Reanalysis-2 + TMIv7r1"  # will show up in the OBS figure
+bin_obs_list = sorted(glob.glob(os.environ["OBS_DATA"]
+                                + "/convecTransBasic_R2TMIv7r1_200206_201405_res="
+                                + RES + "_fillNrCWV_"
+                                + TEMP_VAR + ".nc"))
 # convecTransCriticalCollapse.py generates 2 sets figures for OBS too
-FIG_OBS_DIR=os.environ["WORK_DIR"] + "/obs/PS"
-FIG_OBS_FILENAME_CTS="convecTransCriticalCollapse_stats_R2TMIv7r1_200206_201405_res="\
-                     +RES+"_fillNrCWV_"+TEMP_VAR+".eps"
-FIG_OBS_FILENAME_WC="convecTransCriticalCollapse_wc_R2TMIv7r1_200206_201405_res="\
-                    +RES+"_fillNrCWV_"+TEMP_VAR+".eps"
+FIG_OBS_DIR = os.environ["WORK_DIR"] + "/obs/PS"
+FIG_OBS_FILENAME_CTS = "convecTransCriticalCollapse_stats_R2TMIv7r1_200206_201405_res=" \
+                       + RES + "_fillNrCWV_" + TEMP_VAR + ".eps"
+FIG_OBS_FILENAME_WC = "convecTransCriticalCollapse_wc_R2TMIv7r1_200206_201405_res=" \
+                      + RES + "_fillNrCWV_" + TEMP_VAR + ".eps"

# Don't fit/plot bins with PDF<PDF_THRESHOLD

diff --git a/diagnostics/convective_transition_diag/convecTransCriticalCollapse_util.py b/diagnostics/convective_transition_diag/convecTransCriticalCollapse_util.py

     pdf_gt_th = numpy.zeros(PDF.shape)
     with numpy.errstate(invalid="ignore"):
         pdf_gt_th[PDF > PDF_THRESHOLD] = 1
-    P[pdf_gt_th == 0]=numpy.nan
-    CP[pdf_gt_th == 0]=numpy.nan
+    P[pdf_gt_th == 0] = numpy.nan
+    CP[pdf_gt_th == 0] = numpy.nan
     PDF = numpy.copy(PDF)
-    PDF_pe = PDF*CP
+    PDF_pe = PDF * CP
     # Indicator of (temp,reg) with wide CWV range
     # & other criteria specified below
     # i.e., t_reg_I will be further modified below
-    t_reg_I = (numpy.squeeze(numpy.sum(pdf_gt_th, axis=1))*CWV_BIN_WIDTH > CWV_RANGE_THRESHOLD)
+    t_reg_I = (numpy.squeeze(numpy.sum(pdf_gt_th, axis=1)) * CWV_BIN_WIDTH > CWV_RANGE_THRESHOLD)

     # Connected Component Section
     # The CWV_RANGE_THRESHOLD-Criterion must be satisfied by a connected component
@@ -155,52 +156,53 @@ def convecTransCriticalCollapse_fitCritical(argsv1, *argsv2):
         for Tidx in numpy.arange(P0.shape[2]):
             if t_reg_I[reg, Tidx]:
                 dg = networkx.DiGraph()
-                for cwv_idx in numpy.arange(pdf_gt_th.shape[1]-1):
-                    if pdf_gt_th[reg, cwv_idx, Tidx] > 0 and pdf_gt_th[reg, cwv_idx+1, Tidx] > 0:
-                        networkx.add_path(dg, [cwv_idx, cwv_idx+1])
+                for cwv_idx in numpy.arange(pdf_gt_th.shape[1] - 1):
+                    if pdf_gt_th[reg, cwv_idx, Tidx] > 0 and pdf_gt_th[reg, cwv_idx + 1, Tidx] > 0:
+                        networkx.add_path(dg, [cwv_idx, cwv_idx + 1])
                 largest = max((dg.subgraph(c) for c in networkx.weakly_connected_components(dg)), key=len)
                 bcc = largest.nodes()  # Biggest Connected Component
-                if sum(pdf_gt_th[reg, bcc, Tidx])*CWV_BIN_WIDTH>CWV_RANGE_THRESHOLD:
+                if sum(pdf_gt_th[reg, bcc, Tidx]) * CWV_BIN_WIDTH > CWV_RANGE_THRESHOLD:
                     t_reg_I[reg, Tidx] = True
                     pdf_gt_th[reg, :, Tidx] = 0
                     pdf_gt_th[reg, bcc, Tidx] = 1
                 else:
-                    t_reg_I[reg,Tidx]=False
-                    pdf_gt_th[reg,:,Tidx]=0
+                    t_reg_I[reg, Tidx] = False
+                    pdf_gt_th[reg, :, Tidx] = 0
     # End of Connected Component Section
    #
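Note on the hunk above: fitCritical keeps the POD's connected-component criterion intact. pdf_gt_th flags CWV bins whose PDF exceeds PDF_THRESHOLD, and a (reg, Tidx) pair survives only if the longest run of adjacent flagged bins spans more than CWV_RANGE_THRESHOLD mm of CWV; the networkx DiGraph of neighboring bins is one way to find that run. A minimal pure-numpy sketch of the same computation (illustrative only, not part of the patch; names mirror the code above):

import numpy

def longest_flagged_run(pdf_gt_th_1d):
    """Length and (start, stop) of the longest run of consecutive flagged
    CWV bins: the 'Biggest Connected Component' built above with networkx."""
    best_len, best_start, best_stop = 0, 0, 0
    run_start = None
    flags = list(pdf_gt_th_1d > 0) + [False]  # sentinel closes the final run
    for i, flagged in enumerate(flags):
        if flagged and run_start is None:
            run_start = i
        elif not flagged and run_start is not None:
            if i - run_start > best_len:
                best_len, best_start, best_stop = i - run_start, run_start, i
            run_start = None
    return best_len, best_start, best_stop

run_len, start, stop = longest_flagged_run(numpy.array([0, 1, 1, 1, 0, 1]))  # -> (3, 1, 4)
# keep this (reg, Tidx) only if run_len * CWV_BIN_WIDTH > CWV_RANGE_THRESHOLD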
    # Copy P, CP into p, cp for (temp,reg) with "wide CWV range" & "large PDF"
-    p=numpy.zeros(P.shape)
-    cp=numpy.zeros(P.shape)
+    p = numpy.zeros(P.shape)
+    cp = numpy.zeros(P.shape)
     for reg in numpy.arange(P.shape[0]):
         for Tidx in numpy.arange(P.shape[2]):
-            if t_reg_I[reg,Tidx]:
-                p[reg,:,Tidx]=numpy.copy(P[reg,:,Tidx])
-                cp[reg,:,Tidx]=numpy.copy(CP[reg,:,Tidx])
-    p[pdf_gt_th==0]=numpy.nan
-    cp[pdf_gt_th==0]=numpy.nan
+            if t_reg_I[reg, Tidx]:
+                p[reg, :, Tidx] = numpy.copy(P[reg, :, Tidx])
+                cp[reg, :, Tidx] = numpy.copy(CP[reg, :, Tidx])
+    p[pdf_gt_th == 0] = numpy.nan
+    cp[pdf_gt_th == 0] = numpy.nan

     # Discard (temp,reg) if conditional probability < CP_THRESHOLD
     for reg in numpy.arange(P.shape[0]):
         for Tidx in numpy.arange(P.shape[2]):
-            if t_reg_I[reg,Tidx] and cp[reg,:,Tidx][cp[reg,:,Tidx]>=0.0].size>0:
-                if numpy.max(cp[reg,:,Tidx][cp[reg,:,Tidx]>=0])<CP_THRESHOLD:
-                    t_reg_I[reg,Tidx]=False
+            if t_reg_I[reg, Tidx] and cp[reg, :, Tidx][cp[reg, :, Tidx] >= 0.0].size > 0:
+                if numpy.max(cp[reg, :, Tidx][cp[reg, :, Tidx] >= 0]) < CP_THRESHOLD:
+                    t_reg_I[reg, Tidx] = False
             else:
-                t_reg_I[reg,Tidx]=False
+                t_reg_I[reg, Tidx] = False

     # Find reference CWV (wr) at which P (or p1) equals PRECIP_REF
-    wr=numpy.zeros(t_reg_I.shape)
+    wr = numpy.zeros(t_reg_I.shape)
     for reg in numpy.arange(t_reg_I.shape[0]):
         for Tidx in numpy.arange(t_reg_I.shape[1]):
-            if t_reg_I[reg,Tidx]:
-                p_gt_pref=p[reg,:,Tidx]>PRECIP_REF
+            if t_reg_I[reg, Tidx]:
+                p_gt_pref = p[reg, :, Tidx] > PRECIP_REF
                 if numpy.nonzero(p_gt_pref)[0].size > 0:  # p_gt_pref non-empty
-                    wr_idx=numpy.nonzero(p_gt_pref)[0][0]
-                    wr_idx -= (p[reg, wr_idx, Tidx]-PRECIP_REF)/(p[reg,wr_idx,Tidx]-p[reg,wr_idx-1,Tidx])
-                    wr[reg,Tidx]=(wr_idx+1)*CWV_BIN_WIDTH
+                    wr_idx = numpy.nonzero(p_gt_pref)[0][0]
+                    wr_idx -= (p[reg, wr_idx, Tidx] - PRECIP_REF) / (
+                            p[reg, wr_idx, Tidx] - p[reg, wr_idx - 1, Tidx])
+                    wr[reg, Tidx] = (wr_idx + 1) * CWV_BIN_WIDTH
                 else:  # p1<PRECIP_REF for all CWV
-            fitRange=((p_mp3t>PRECIP_FIT_MIN)*(p_mp3t<PRECIP_FIT_MAX))
-            if numpy.nonzero(fitRange)[0].size>1: # Fitting requires at least 2 points
-                fitResult=numpy.polyfit(cwvRange[fitRange],p_mp3t[fitRange],1)
-                wc[reg,:]=wr[reg,:]-fitResult[1]/fitResult[0] # wc=wr-(wr-wc)
-                al[reg]=fitResult[0]
-            else: # Can't fit
-                wc[reg,:]=numpy.nan
-                al[reg]=numpy.nan
-
-        return t_reg_I,wc,al,TEMP_MIN,TEMP_MAX,P,CP,PDF,PDF_pe
-
-    else: # binned data doesn't exist
+            fitRange = ((p_mp3t > PRECIP_FIT_MIN) * (p_mp3t < PRECIP_FIT_MAX))
+            if numpy.nonzero(fitRange)[0].size > 1:  # Fitting requires at least 2 points
+                fitResult = numpy.polyfit(cwvRange[fitRange], p_mp3t[fitRange], 1)
+                wc[reg, :] = wr[reg, :] - fitResult[1] / fitResult[0]  # wc=wr-(wr-wc)
+                al[reg] = fitResult[0]
+            else:  # Can't fit
+                wc[reg, :] = numpy.nan
+                al[reg] = numpy.nan
+
+        return t_reg_I, wc, al, TEMP_MIN, TEMP_MAX, P, CP, PDF, PDF_pe
+
+    else:  # binned data doesn't exist
         return (numpy.array([]), numpy.array([]), numpy.array([]),
                 numpy.array([]), numpy.array([]), numpy.array([]),
                 numpy.array([]), numpy.array([]), numpy.array([]))
+
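For readers following the fit above: the pickup curve p (precipitation conditionally averaged on CWV, shifted by the reference CWV wr) is fit linearly where PRECIP_FIT_MIN < p < PRECIP_FIT_MAX, and the critical CWV is where the fitted line crosses zero precipitation, i.e. wc = wr - intercept/slope (the fitResult[1]/fitResult[0] expression). A self-contained check with synthetic numbers (illustrative only; all values below are made up):

import numpy

wr = 60.0                                  # reference CWV (mm), made up
x = numpy.arange(-5.0, 5.0, 0.25)          # CWV - wr
p = numpy.maximum(0.0, 1.5 * (x + 2.0))    # pickup starts 2 mm below wr

fit_range = (p > 0.25) * (p < 8.0)         # plays the role of PRECIP_FIT_MIN/MAX
slope, intercept = numpy.polyfit(x[fit_range], p[fit_range], 1)
wc = wr - intercept / slope                # the identity used in the hunk above
print(round(wc, 6))                        # -> 58.0, i.e. the true wr - 2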
 # ======================================================================
 # convecTransCriticalCollapse_plot
 # plot two sets for figures for MODEL & OBS
 # (whenever binned output files exist)
-def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6):
-
+def convecTransCriticalCollapse_plot(argsv1, argsv2, argsv3, argsv4, argsv5, argsv6):
     print("Plotting...")

-    cwv_bin_center,\
-    temp_bin_center,\
-    P0,\
-    P1,\
-    P2,\
-    PE,\
-    Q0,\
-    Q1,\
-    CBW,\
-    PT=argsv1
-
-    t_reg_I,\
-    wc,\
-    al,\
-    TEMP_MIN,\
-    TEMP_MAX,\
-    p1,\
-    cp,\
-    pdf,\
-    pdf_pe=argsv2
-
-    cwv_bin_center_obs,\
-    temp_bin_center_obs,\
-    P0_obs,\
-    P1_obs,\
-    P2_obs,\
-    PE_obs,\
-    Q0_obs,\
-    Q1_obs,\
-    CBW_obs,\
-    PT_obs=argsv3
-
-    t_reg_I_obs,\
-    wc_obs,\
-    al_obs,\
-    TEMP_MIN_obs,\
-    TEMP_MAX_obs,\
-    p1_obs,\
-    cp_obs,\
-    pdf_obs,\
-    pdf_pe_obs=argsv4
-
-    NUMBER_OF_REGIONS,\
-    REGION_STR,\
-    FIG_OUTPUT_DIR,\
-    FIG_FILENAME_CTS,\
-    FIG_FILENAME_WC,\
-    MODEL,\
-    FIG_OBS_DIR,\
-    FIG_OBS_FILENAME_CTS,\
-    FIG_OBS_FILENAME_WC,\
-    OBS,\
-    RES,\
-    USE_SAME_COLOR_MAP,\
-    OVERLAY_OBS_ON_TOP_OF_MODEL_FIG,\
-    BULK_TROPOSPHERIC_TEMPERATURE_MEASURE=argsv5
-
-    fig_params=argsv6
-
-    if p1_obs.size!=0:
+    cwv_bin_center, \
+    temp_bin_center, \
+    P0, \
+    P1, \
+    P2, \
+    PE, \
+    Q0, \
+    Q1, \
+    CBW, \
+    PT = argsv1
+
+    t_reg_I, \
+    wc, \
+    al, \
+    TEMP_MIN, \
+    TEMP_MAX, \
+    p1, \
+    cp, \
+    pdf, \
+    pdf_pe = argsv2
+
+    cwv_bin_center_obs, \
+    temp_bin_center_obs, \
+    P0_obs, \
+    P1_obs, \
+    P2_obs, \
+    PE_obs, \
+    Q0_obs, \
+    Q1_obs, \
+    CBW_obs, \
+    PT_obs = argsv3
+
+    t_reg_I_obs, \
+    wc_obs, \
+    al_obs, \
+    TEMP_MIN_obs, \
+    TEMP_MAX_obs, \
+    p1_obs, \
+    cp_obs, \
+    pdf_obs, \
+    pdf_pe_obs = argsv4
+
+    NUMBER_OF_REGIONS, \
+    REGION_STR, \
+    FIG_OUTPUT_DIR, \
+    FIG_FILENAME_CTS, \
+    FIG_FILENAME_WC, \
+    MODEL, \
+    FIG_OBS_DIR, \
+    FIG_OBS_FILENAME_CTS, \
+    FIG_OBS_FILENAME_WC, \
+    OBS, \
+    RES, \
+    USE_SAME_COLOR_MAP, \
+    OVERLAY_OBS_ON_TOP_OF_MODEL_FIG, \
+    BULK_TROPOSPHERIC_TEMPERATURE_MEASURE = argsv5
+
+    fig_params = argsv6
+
+    if p1_obs.size != 0:
         # ======================================================================
         # ======================Start Plot OBS Binned Data======================
         # ======================================================================
-        NoC=TEMP_MAX_obs-TEMP_MIN_obs+1 # Number of Colors
-        scatter_colors = cm.jet(numpy.linspace(0,1,NoC,endpoint=True))
+        NoC = TEMP_MAX_obs - TEMP_MIN_obs + 1  # Number of Colors
+        scatter_colors = cm.jet(numpy.linspace(0, 1, NoC, endpoint=True))

         axes_fontsize, legend_fonsize, marker_size, xtick_pad, figsize1, figsize2 = fig_params['f0']

@@ -333,99 +335,103 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6):

         ##### Figure Convective Transition Statistics (CTS) #####
         # create figure canvas
-        fig_obs_cts = mp.figure(figsize=(figsize1,figsize2))
+        fig_obs_cts = mp.figure(figsize=(figsize1, figsize2))

         fig_obs_cts.suptitle('Convective Transition Collapsed Statistics'
-                             '('+OBS+', '+RES+'$^{\circ}$)', y=1.02, fontsize=16) ##Change y=1.04 to 1.02 for Python3.
+                             '(' + OBS + ', ' + RES + '$^{\circ}$)', y=1.02,
+                             fontsize=16)  ##Change y=1.04 to 1.02 for Python3.
for reg in numpy.arange(NUMBER_OF_REGIONS): # create figure 1 - ax1 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS,4,1+reg*NUMBER_OF_REGIONS) + ax1 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg * NUMBER_OF_REGIONS) ax1.set_xlim(fig_params['f1'][0]) ax1.set_ylim(fig_params['f1'][1]) ax1.set_xticks(fig_params['f1'][4]) ax1.set_yticks(fig_params['f1'][5]) ax1.tick_params(labelsize=axes_fontsize) ax1.tick_params(axis="x", pad=10) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax1.scatter(cwv_bin_center_obs-wc_obs[reg,Tidx],p1_obs[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3, + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax1.scatter(cwv_bin_center_obs - wc_obs[reg, Tidx], p1_obs[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, label="{:.0f}".format(temp_bin_center_obs[Tidx])) - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax1.scatter(cwv_bin_center_obs-wc_obs[reg,Tidx],p1_obs[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3, + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax1.scatter(cwv_bin_center_obs - wc_obs[reg, Tidx], p1_obs[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, label="{:.1f}".format(temp_bin_center_obs[Tidx])) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax1.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx]-wc_obs[reg,Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax1.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx] - wc_obs[reg, Tidx], + fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^", - label=': $\widehat{q_{sat}}-w_c$; '\ - +'$\widehat{q_{sat}}$: '\ - 'Column-integrated Saturation Specific Humidity w.r.t. Liquid; '\ - +'$w_c$: Estimated Critical Column Water Vapor.') - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax1.scatter(temp_bin_center_obs[Tidx]-wc_obs[reg,Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + label=': $\widehat{q_{sat}}-w_c$; ' \ + + '$\widehat{q_{sat}}$: ' \ + 'Column-integrated Saturation Specific Humidity w.r.t. Liquid; ' \ + + '$w_c$: Estimated Critical Column Water Vapor.') + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax1.scatter(temp_bin_center_obs[Tidx] - wc_obs[reg, Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^", - label=': $\widehat{q_{sat}}-w_c$; '\ - +'$\widehat{q_{sat}}$:' - ' Column-integrated Saturation Specific Humidity w.r.t. 
Liquid; '\ - +'$w_c$: Estimated Critical Column Water Vapor.') - ax1.plot(numpy.arange(0.,fig_params['f1'][0][1],0.1), - al_obs[reg]*numpy.arange(0.,fig_params['f1'][0][1],0.1), - '--',color='0.5', zorder=4) + label=': $\widehat{q_{sat}}-w_c$; ' \ + + '$\widehat{q_{sat}}$:' + ' Column-integrated Saturation Specific Humidity w.r.t. Liquid; ' \ + + '$w_c$: Estimated Critical Column Water Vapor.') + ax1.plot(numpy.arange(0., fig_params['f1'][0][1], 0.1), + al_obs[reg] * numpy.arange(0., fig_params['f1'][0][1], 0.1), + '--', color='0.5', zorder=4) ax1.set_xlabel(fig_params['f1'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f1'][3], fontsize=axes_fontsize) - ax1.text(0.4, 0.95, "Slope="+"{:.2f}".format(al_obs[reg]) , transform=ax1.transAxes, fontsize=12, verticalalignment="top") + ax1.text(0.4, 0.95, "Slope=" + "{:.2f}".format(al_obs[reg]), transform=ax1.transAxes, fontsize=12, + verticalalignment="top") ax1.grid() ax1.grid(visible=True, which='minor', color='0.8', linestyle='-') ax1.set_axisbelow(True) handles, labels = ax1.get_legend_handles_labels() - num_handles = sum(t_reg_I_obs[reg,:]) + num_handles = sum(t_reg_I_obs[reg, :]) leg = ax1.legend(handles[0:num_handles], labels[0:num_handles], fontsize=axes_fontsize, - bbox_to_anchor = (0.05, 0.95), + bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.1, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax1.add_artist(leg) - if reg==0: + if reg == 0: ax1.text(s='Precip. cond. avg. on CWV', x=0.5, y=1.05, transform=ax1.transAxes, fontsize=12, ha='center', va='bottom') # create figure 2 (probability pickup) - ax2 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS,4,2+reg*NUMBER_OF_REGIONS) + ax2 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS, 4, 2 + reg * NUMBER_OF_REGIONS) ax2.set_xlim(fig_params['f2'][0]) ax2.set_ylim(fig_params['f2'][1]) ax2.set_xticks(fig_params['f2'][4]) ax2.set_yticks(fig_params['f2'][5]) ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - ax2.scatter(cwv_bin_center_obs-wc_obs[reg,Tidx],cp_obs[reg,:,Tidx], - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + ax2.scatter(cwv_bin_center_obs - wc_obs[reg, Tidx], cp_obs[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax2.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx]-wc_obs[reg,Tidx],fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax2.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx] - wc_obs[reg, Tidx], + fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax2.scatter(temp_bin_center_obs[Tidx] - wc_obs[reg, Tidx], 
fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^") - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax2.scatter(temp_bin_center_obs[Tidx]-wc_obs[reg,Tidx],fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") ax2.set_xlabel(fig_params['f2'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f2'][3], fontsize=axes_fontsize) ax2.text(0.05, 0.95, REGION_STR[reg], transform=ax2.transAxes, @@ -433,36 +439,37 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): ax2.grid() ax2.grid(visible=True, which='minor', color='0.8', linestyle='-') ax2.set_axisbelow(True) - if reg==0: - ax2.text(s='Prob. of Precip.>'+str(PT_obs)+'mm/h', x=0.5, y=1.05, + if reg == 0: + ax2.text(s='Prob. of Precip.>' + str(PT_obs) + 'mm/h', x=0.5, y=1.05, transform=ax2.transAxes, fontsize=12, ha='center', va='bottom') # create figure 3 (normalized PDF) - ax3 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS,4,3+reg*NUMBER_OF_REGIONS) + ax3 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS, 4, 3 + reg * NUMBER_OF_REGIONS) ax3.set_yscale("log") ax3.set_xlim(fig_params['f3'][0]) ax3.set_ylim(fig_params['f3'][1]) ax3.set_xticks(fig_params['f3'][4]) ax3.tick_params(labelsize=axes_fontsize) ax3.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - PDFNormalizer=pdf_obs[reg,numpy.where(cwv_bin_center_obs<=wc_obs[reg,Tidx])[0][-1],Tidx] - ax3.scatter(cwv_bin_center_obs-wc_obs[reg,Tidx],pdf_obs[reg,:,Tidx]/PDFNormalizer, - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + PDFNormalizer = pdf_obs[reg, numpy.where(cwv_bin_center_obs <= wc_obs[reg, Tidx])[0][-1], Tidx] + ax3.scatter(cwv_bin_center_obs - wc_obs[reg, Tidx], pdf_obs[reg, :, Tidx] / PDFNormalizer, + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax3.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx]-wc_obs[reg, Tidx], fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax3.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx] - wc_obs[reg, Tidx], + fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax3.scatter(temp_bin_center_obs[Tidx] - wc_obs[reg, Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^") - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax3.scatter(temp_bin_center_obs[Tidx]-wc_obs[reg, Tidx], fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - 
facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=3,marker="^") ax3.set_xlabel(fig_params['f3'][2], fontsize=axes_fontsize) ax3.set_ylabel(fig_params['f3'][3], fontsize=axes_fontsize) ax3.grid() @@ -473,49 +480,49 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): fontsize=12, ha='center', va='bottom') # create figure 4 (normalized PDF - precipitation) - ax4 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg*NUMBER_OF_REGIONS) + ax4 = fig_obs_cts.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg * NUMBER_OF_REGIONS) ax4.set_yscale("log") ax4.set_xlim(fig_params['f4'][0]) ax4.set_ylim(fig_params['f4'][1]) ax4.set_xticks(fig_params['f4'][4]) ax4.tick_params(labelsize=axes_fontsize) ax4.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - PDFNormalizer=pdf_obs[reg, numpy.where(cwv_bin_center_obs <= wc_obs[reg,Tidx])[0][-1], Tidx] - ax4.scatter(cwv_bin_center_obs-wc_obs[reg,Tidx],pdf_pe_obs[reg, :, Tidx]/PDFNormalizer, - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + PDFNormalizer = pdf_obs[reg, numpy.where(cwv_bin_center_obs <= wc_obs[reg, Tidx])[0][-1], Tidx] + ax4.scatter(cwv_bin_center_obs - wc_obs[reg, Tidx], pdf_pe_obs[reg, :, Tidx] / PDFNormalizer, + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3) - for Tidx in numpy.arange(TEMP_MIN_obs,TEMP_MAX_obs+1): - if t_reg_I_obs[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax4.scatter(Q1_obs[reg,Tidx]/Q0_obs[reg,Tidx]-wc_obs[reg, Tidx], - fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + for Tidx in numpy.arange(TEMP_MIN_obs, TEMP_MAX_obs + 1): + if t_reg_I_obs[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax4.scatter(Q1_obs[reg, Tidx] / Q0_obs[reg, Tidx] - wc_obs[reg, Tidx], + fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^") - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax4.scatter(temp_bin_center_obs[Tidx]-wc_obs[reg,Tidx], fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax4.scatter(temp_bin_center_obs[Tidx] - wc_obs[reg, Tidx], fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^") ax4.set_xlabel(fig_params['f4'][2], fontsize=axes_fontsize) ax4.set_ylabel(fig_params['f4'][3], fontsize=axes_fontsize) - ax4.text(0.05, 0.95, "Precip > "+str(PT_obs)+" mm h$^-$$^1$", + ax4.text(0.05, 0.95, "Precip > " + str(PT_obs) + " mm h$^-$$^1$", transform=ax4.transAxes, fontsize=12, verticalalignment="top") ax4.grid() ax4.grid(visible=True, which='minor', color='0.8', linestyle='-') ax4.set_axisbelow(True) - if reg==0: - ax4.text(s='PDF of CWV for Precip.>'+str(PT_obs)+'mm/hr', x=0.49, y=1.05, + if reg == 0: + ax4.text(s='PDF of CWV for Precip.>' + str(PT_obs) + 'mm/hr', x=0.49, y=1.05, transform=ax4.transAxes, fontsize=12, ha='center', va='bottom') - if 
BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - temp_str='$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)' \ - ' used as the bulk tropospheric temperature measure' - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - temp_str='$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ - 'used as the bulk tropospheric temperature measure' + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + temp_str = '$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)' \ + ' used as the bulk tropospheric temperature measure' + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + temp_str = '$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ + 'used as the bulk tropospheric temperature measure' fig_obs_cts.text(s=temp_str, x=0, y=0, ha='left', va='top', transform=fig_obs_cts.transFigure, fontsize=12) triag_qsat_str = '$\Delta$: $\widehat{q_{sat}}-w_c$;' \ @@ -528,44 +535,44 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): # set layout to tight (so that space between figures is minimized) fig_obs_cts.tight_layout() - fig_obs_cts.savefig(FIG_OBS_DIR+"/"+FIG_OBS_FILENAME_CTS, bbox_inches="tight") + fig_obs_cts.savefig(FIG_OBS_DIR + "/" + FIG_OBS_FILENAME_CTS, bbox_inches="tight") # Figure Critical CWV (WC) ##### - fig_obs_wc = mp.figure(figsize=(figsize1/1.5, figsize2/2.6)) + fig_obs_wc = mp.figure(figsize=(figsize1 / 1.5, figsize2 / 2.6)) fig_obs_wc.suptitle('Critical CWV, Col. Satn., & Critical Col. RH (' + OBS + ', ' + RES + '$^{\circ}$)', y=1.02, fontsize=16) - reg_color=[-1,-2,-3,0] + reg_color = [-1, -2, -3, 0] # create figure 5: wc - ax1 = fig_obs_wc.add_subplot(1,2,1) + ax1 = fig_obs_wc.add_subplot(1, 2, 1) ax1.set_xlim(fig_params['f5'][0]) ax1.set_ylim(fig_params['f5'][1]) ax1.set_xticks(fig_params['f5'][4]) ax1.set_yticks(fig_params['f5'][5]) ax1.tick_params(labelsize=axes_fontsize) ax1.tick_params(axis="x", pad=10) - ax1.set_aspect(float(fig_params['f5'][0][1]-fig_params['f5'][0][0])/float(fig_params['f5'][1][1] - -fig_params['f5'][1][0])) + ax1.set_aspect(float(fig_params['f5'][0][1] - fig_params['f5'][0][0]) / float(fig_params['f5'][1][1] + - fig_params['f5'][1][0])) for reg in numpy.arange(NUMBER_OF_REGIONS): - ax1.plot(temp_bin_center_obs,wc_obs[reg,:],'-',color=scatter_colors[reg_color[reg],:]) - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: + ax1.plot(temp_bin_center_obs, wc_obs[reg, :], '-', color=scatter_colors[reg_color[reg], :]) + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): warnings.simplefilter("ignore") - ax1.plot(temp_bin_center_obs,Q1_obs[reg,:]/Q0_obs[reg,:], '-', - color=scatter_colors[reg_color[reg],:]) - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax1.plot(temp_bin_center_obs,temp_bin_center_obs,'-',color='0.4') - ax1.scatter(temp_bin_center_obs,wc_obs[reg,:],color=scatter_colors[reg_color[reg],:], - s=marker_size,clip_on=True,zorder=3,label=REGION_STR[reg]) + ax1.plot(temp_bin_center_obs, Q1_obs[reg, :] / Q0_obs[reg, :], '-', + color=scatter_colors[reg_color[reg], :]) + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax1.plot(temp_bin_center_obs, temp_bin_center_obs, '-', color='0.4') + ax1.scatter(temp_bin_center_obs, wc_obs[reg, :], color=scatter_colors[reg_color[reg], :], + s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) handles, labels = ax1.get_legend_handles_labels() - leg = ax1.legend(handles, labels, fontsize=axes_fontsize, bbox_to_anchor=(0.05,0.95), + leg = ax1.legend(handles, labels, 
fontsize=axes_fontsize, bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.5, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax1.text(0.3, 0.2, OBS, transform=ax1.transAxes, fontsize=12, fontweight="bold", verticalalignment="top") - ax1.text(0.3, 0.1, RES+"$^{\circ}$", transform=ax1.transAxes, fontsize=12, + ax1.text(0.3, 0.1, RES + "$^{\circ}$", transform=ax1.transAxes, fontsize=12, fontweight="bold", verticalalignment="top") ax1.set_xlabel(fig_params['f5'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f5'][3], fontsize=axes_fontsize) @@ -575,37 +582,37 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): fontsize=12, ha='center', va='bottom') # create figure 6: wc/qsat_int - ax2 = fig_obs_wc.add_subplot(1,2,2) + ax2 = fig_obs_wc.add_subplot(1, 2, 2) ax2.set_xlim(fig_params['f6'][0]) ax2.set_ylim(fig_params['f6'][1]) ax2.set_xticks(fig_params['f6'][4]) ax2.set_yticks(fig_params['f6'][5]) ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=10) - ax2.set_aspect(float(fig_params['f6'][0][1]-fig_params['f5'][0][0])/float(fig_params['f6'][1][1] - -fig_params['f6'][1][0])) + ax2.set_aspect(float(fig_params['f6'][0][1] - fig_params['f5'][0][0]) / float(fig_params['f6'][1][1] + - fig_params['f6'][1][0])) for reg in numpy.arange(NUMBER_OF_REGIONS): - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): warnings.simplefilter("ignore") - ax2.plot(temp_bin_center_obs,wc_obs[reg,:]/(Q1_obs[reg,:]/Q0_obs[reg,:]),'-', - color=scatter_colors[reg_color[reg],:]) - ax2.scatter(temp_bin_center_obs,wc_obs[reg,:]/(Q1_obs[reg,:]/Q0_obs[reg,:]), - color=scatter_colors[reg_color[reg],:], - s=marker_size,clip_on=True,zorder=3,label=REGION_STR[reg]) - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax2.plot(temp_bin_center_obs,wc_obs[reg,:]/temp_bin_center_obs,'-', - color=scatter_colors[reg_color[reg],:]) - ax2.scatter(temp_bin_center_obs,wc_obs[reg,:]/temp_bin_center_obs, - color=scatter_colors[reg_color[reg],:], + ax2.plot(temp_bin_center_obs, wc_obs[reg, :] / (Q1_obs[reg, :] / Q0_obs[reg, :]), '-', + color=scatter_colors[reg_color[reg], :]) + ax2.scatter(temp_bin_center_obs, wc_obs[reg, :] / (Q1_obs[reg, :] / Q0_obs[reg, :]), + color=scatter_colors[reg_color[reg], :], + s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax2.plot(temp_bin_center_obs, wc_obs[reg, :] / temp_bin_center_obs, '-', + color=scatter_colors[reg_color[reg], :]) + ax2.scatter(temp_bin_center_obs, wc_obs[reg, :] / temp_bin_center_obs, + color=scatter_colors[reg_color[reg], :], s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) handles, labels = ax2.get_legend_handles_labels() leg = ax2.legend(handles, labels, fontsize=axes_fontsize, bbox_to_anchor=(0.6, 0.95), bbox_transform=ax2.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.5, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax2.text(0.15, 0.2, OBS, transform=ax2.transAxes, fontsize=12, fontweight="bold", verticalalignment="top") - ax2.text(0.15, 0.1, RES+"$^{\circ}$", transform=ax2.transAxes, + ax2.text(0.15, 0.1, RES + "$^{\circ}$", 
transform=ax2.transAxes, fontsize=12, fontweight="bold", verticalalignment="top") ax2.set_xlabel(fig_params['f6'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f6'][3], fontsize=axes_fontsize) @@ -614,39 +621,39 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): ax2.text(s='Critical Col. RH', x=0.5, y=1.02, transform=ax2.transAxes, fontsize=12, ha='center', va='bottom') - footnote_str='Solid line: $\widehat{q_{sat}}$' \ - ' (1000-200hPa Column-integrated Saturation Specific Humidity w.r.t. Liquid)\n' - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - footnote_str+='$\widehat{T}$' \ - ' (1000-200hPa Mass-weighted Column Average Temperature)' \ - ' as the bulk tropospheric temperature measure' - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - footnote_str+='$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ - ' as the bulk tropospheric temperature measure' - footnote_str+='\n$w_c$ estimated by fitting (dashed) the average precip. ' \ - 'pickup curves for the 3 most probable temperature bins' + footnote_str = 'Solid line: $\widehat{q_{sat}}$' \ + ' (1000-200hPa Column-integrated Saturation Specific Humidity w.r.t. Liquid)\n' + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + footnote_str += '$\widehat{T}$' \ + ' (1000-200hPa Mass-weighted Column Average Temperature)' \ + ' as the bulk tropospheric temperature measure' + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + footnote_str += '$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ + ' as the bulk tropospheric temperature measure' + footnote_str += '\n$w_c$ estimated by fitting (dashed) the average precip. ' \ + 'pickup curves for the 3 most probable temperature bins' #ax1.text(s=footnote_str, x=0, y=-0.02, transform=fig_obs_wc.transFigure, ha='left', va='top', fontsize=12) # set layout to tight (so that space between figures is minimized) fig_obs_wc.tight_layout() - fig_obs_wc.savefig(FIG_OBS_DIR+"/"+FIG_OBS_FILENAME_WC, bbox_inches="tight") + fig_obs_wc.savefig(FIG_OBS_DIR + "/" + FIG_OBS_FILENAME_WC, bbox_inches="tight") print("...Completed!") - print(" OBS Figure saved as "+FIG_OBS_DIR+"/"+FIG_OBS_FILENAME_CTS+"!") - print(" OBS Figure saved as "+FIG_OBS_DIR+"/"+FIG_OBS_FILENAME_WC+"!") + print(" OBS Figure saved as " + FIG_OBS_DIR + "/" + FIG_OBS_FILENAME_CTS + "!") + print(" OBS Figure saved as " + FIG_OBS_DIR + "/" + FIG_OBS_FILENAME_WC + "!") # ====================================================================== # =======================End Plot OBS Binned Data======================= # ====================================================================== # Use OBS to set colormap (but if they don't exist or users don't want to...) 
- if p1_obs.siz == 0 or not USE_SAME_COLOR_MAP: + if p1_obs.size == 0 or not USE_SAME_COLOR_MAP: TEMP_MIN_obs = TEMP_MIN TEMP_MAX_obs = TEMP_MAX # ====================================================================== # =====================Start Plot MODEL Binned Data===================== # ====================================================================== - NoC = TEMP_MAX_obs-TEMP_MIN_obs + 1 # Number of Colors + NoC = TEMP_MAX_obs - TEMP_MIN_obs + 1 # Number of Colors scatter_colors = cm.jet(numpy.linspace(0, 1, NoC, endpoint=True)) axes_fontsize, legend_fonsize, marker_size, xtick_pad, figsize1, figsize2 = fig_params['f0'] @@ -656,12 +663,12 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): # Figure Convective Transition Statistics (CTS) ##### # create figure canvas fig_cts = mp.figure(figsize=(figsize1, figsize2)) - + fig_cts.suptitle('Convective Transition Collapsed Statistics (' + MODEL + ')', y=1.02, fontsize=16) for reg in numpy.arange(NUMBER_OF_REGIONS): # create figure 1 - ax1 = fig_cts.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg*NUMBER_OF_REGIONS) + ax1 = fig_cts.add_subplot(NUMBER_OF_REGIONS, 4, 1 + reg * NUMBER_OF_REGIONS) ax1.set_xlim(fig_params['f1'][0]) ax1.set_ylim(fig_params['f1'][1]) ax1.set_xticks(fig_params['f1'][4]) @@ -671,81 +678,81 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax1.scatter(cwv_bin_center-wc[reg, Tidx], p1[reg, :, Tidx], - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax1.scatter(cwv_bin_center - wc[reg, Tidx], p1[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, label="{:.0f}".format(temp_bin_center[Tidx])) elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax1.scatter(cwv_bin_center-wc[reg,Tidx],p1[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=3, + ax1.scatter(cwv_bin_center - wc[reg, Tidx], p1[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, label="{:.1f}".format(temp_bin_center[Tidx])) - for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX+1): + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax1.scatter(Q1[reg,Tidx]/Q0[reg,Tidx]-wc[reg,Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], - s=marker_size,clip_on=True,zorder=3,marker='^', - label=': $\widehat{q_{sat}}-w_c$; ' +\ - '$\widehat{q_{sat}}$: Column-integrated Saturation Specific Humidity' +\ - ' w.r.t. Liquid; ' +\ + ax1.scatter(Q1[reg, Tidx] / Q0[reg, Tidx] - wc[reg, Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker='^', + label=': $\widehat{q_{sat}}-w_c$; ' + \ + '$\widehat{q_{sat}}$: Column-integrated Saturation Specific Humidity' + \ + ' w.r.t. 
Liquid; ' + \ '$w_c$: Estimated Critical Column Water Vapor.') - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax1.scatter(temp_bin_center[Tidx]-wc[reg,Tidx],fig_params['f1'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker='^', - label=': $\widehat{q_{sat}}-w_c$; '\ - +'$\widehat{q_{sat}}$: Column-integrated Saturation Specific Humidity' - ' w.r.t. Liquid; '\ - +'$w_c$: Estimated Critical Column Water Vapor.') - ax1.plot(numpy.arange(0.,fig_params['f1'][0][1],0.1),al[reg]*numpy.arange(0.,fig_params['f1'][0][1],0.1), - '--',color='0.5', zorder=4) + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax1.scatter(temp_bin_center[Tidx] - wc[reg, Tidx], fig_params['f1'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker='^', + label=': $\widehat{q_{sat}}-w_c$; ' \ + + '$\widehat{q_{sat}}$: Column-integrated Saturation Specific Humidity' + ' w.r.t. Liquid; ' \ + + '$w_c$: Estimated Critical Column Water Vapor.') + ax1.plot(numpy.arange(0., fig_params['f1'][0][1], 0.1), al[reg] * numpy.arange(0., fig_params['f1'][0][1], 0.1), + '--', color='0.5', zorder=4) ax1.set_xlabel(fig_params['f1'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f1'][3], fontsize=axes_fontsize) - ax1.text(0.4, 0.95, "Slope="+"{:.2f}".format(al[reg]), transform=ax1.transAxes, + ax1.text(0.4, 0.95, "Slope=" + "{:.2f}".format(al[reg]), transform=ax1.transAxes, fontsize=12, verticalalignment="top") ax1.grid() ax1.grid(visible=True, which='minor', color='0.8', linestyle='-') ax1.set_axisbelow(True) handles, labels = ax1.get_legend_handles_labels() - num_handles = sum(t_reg_I[reg,:]) + num_handles = sum(t_reg_I[reg, :]) leg = ax1.legend(handles[0:num_handles], labels[0:num_handles], fontsize=axes_fontsize, - bbox_to_anchor=(0.05,0.95), + bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.1, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) ax1.add_artist(leg) - if reg==0: + if reg == 0: ax1.text(s='Precip. cond. avg. 
on CWV', x=0.5, y=1.05, transform=ax1.transAxes, fontsize=12, ha='center', va='bottom') # create figure 2 (probability pickup) - ax2 = fig_cts.add_subplot(NUMBER_OF_REGIONS,4,2+reg*NUMBER_OF_REGIONS) + ax2 = fig_cts.add_subplot(NUMBER_OF_REGIONS, 4, 2 + reg * NUMBER_OF_REGIONS) ax2.set_xlim(fig_params['f2'][0]) ax2.set_ylim(fig_params['f2'][1]) ax2.set_xticks(fig_params['f2'][4]) ax2.set_yticks(fig_params['f2'][5]) ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: - ax2.scatter(cwv_bin_center-wc[reg,Tidx],cp[reg,:,Tidx], - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax2.scatter(Q1[reg,Tidx]/Q0[reg,Tidx]-wc[reg,Tidx],fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax2.scatter(temp_bin_center[Tidx]-wc[reg,Tidx],fig_params['f2'][1][1]*0.98, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: + ax2.scatter(cwv_bin_center - wc[reg, Tidx], cp[reg, :, Tidx], + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax2.scatter(Q1[reg, Tidx] / Q0[reg, Tidx] - wc[reg, Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax2.scatter(temp_bin_center[Tidx] - wc[reg, Tidx], fig_params['f2'][1][1] * 0.98, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") ax2.set_xlabel(fig_params['f2'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f2'][3], fontsize=axes_fontsize) ax2.text(0.05, 0.95, REGION_STR[reg], transform=ax2.transAxes, @@ -753,87 +760,88 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): ax2.grid() ax2.grid(visible=True, which='minor', color='0.8', linestyle='-') ax2.set_axisbelow(True) - if reg==0: - ax2.text(s='Prob. of Precip.>'+str(PT)+'mm/h', x=0.5, y=1.05, + if reg == 0: + ax2.text(s='Prob. 
of Precip.>' + str(PT) + 'mm/h', x=0.5, y=1.05, transform=ax2.transAxes, fontsize=12, ha='center', va='bottom') # create figure 3 (normalized PDF) - ax3 = fig_cts.add_subplot(NUMBER_OF_REGIONS,4,3+reg*NUMBER_OF_REGIONS) + ax3 = fig_cts.add_subplot(NUMBER_OF_REGIONS, 4, 3 + reg * NUMBER_OF_REGIONS) ax3.set_yscale("log") ax3.set_xlim(fig_params['f3'][0]) ax3.set_ylim(fig_params['f3'][1]) ax3.set_xticks(fig_params['f3'][4]) ax3.tick_params(labelsize=axes_fontsize) ax3.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: - PDFNormalizer=pdf[reg,numpy.where(cwv_bin_center<=wc[reg,Tidx])[0][-1],Tidx] - ax3.scatter(cwv_bin_center-wc[reg,Tidx],pdf[reg,:,Tidx]/PDFNormalizer, - edgecolor="none",facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - ax3.scatter(Q1[reg,Tidx]/Q0[reg,Tidx]-wc[reg,Tidx],fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax3.scatter(temp_bin_center[Tidx]-wc[reg,Tidx],fig_params['f3'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: + PDFNormalizer = pdf[reg, numpy.where(cwv_bin_center <= wc[reg, Tidx])[0][-1], Tidx] + ax3.scatter(cwv_bin_center - wc[reg, Tidx], pdf[reg, :, Tidx] / PDFNormalizer, + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3) + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + ax3.scatter(Q1[reg, Tidx] / Q0[reg, Tidx] - wc[reg, Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax3.scatter(temp_bin_center[Tidx] - wc[reg, Tidx], fig_params['f3'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") ax3.set_xlabel(fig_params['f3'][2], fontsize=axes_fontsize) ax3.set_ylabel(fig_params['f3'][3], fontsize=axes_fontsize) ax3.grid() ax3.grid(visible=True, which='minor', color='0.8', linestyle='-') ax3.set_axisbelow(True) - if reg==0: + if reg == 0: ax3.text(s='PDF of CWV', x=0.5, y=1.05, transform=ax3.transAxes, fontsize=12, ha='center', va='bottom') # create figure 4 (normalized PDF - precipitation) - ax4 = fig_cts.add_subplot(NUMBER_OF_REGIONS,4,4+reg*NUMBER_OF_REGIONS) + ax4 = fig_cts.add_subplot(NUMBER_OF_REGIONS, 4, 4 + reg * NUMBER_OF_REGIONS) ax4.set_yscale("log") ax4.set_xlim(fig_params['f4'][0]) ax4.set_ylim(fig_params['f4'][1]) ax4.set_xticks(fig_params['f4'][4]) ax4.tick_params(labelsize=axes_fontsize) ax4.tick_params(axis="x", pad=xtick_pad) - for Tidx in numpy.arange(TEMP_MIN,TEMP_MAX+1): - if t_reg_I[reg,Tidx]: - PDFNormalizer=pdf[reg, numpy.where(cwv_bin_center <= wc[reg,Tidx])[0][-1], Tidx] - ax4.scatter(cwv_bin_center-wc[reg,Tidx],pdf_pe[reg, :, 
Tidx]/PDFNormalizer, - edgecolor="none", facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): + if t_reg_I[reg, Tidx]: + PDFNormalizer = pdf[reg, numpy.where(cwv_bin_center <= wc[reg, Tidx])[0][-1], Tidx] + ax4.scatter(cwv_bin_center - wc[reg, Tidx], pdf_pe[reg, :, Tidx] / PDFNormalizer, + edgecolor="none", facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3) for Tidx in numpy.arange(TEMP_MIN, TEMP_MAX + 1): if t_reg_I[reg, Tidx]: if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: - ax4.scatter(Q1[reg,Tidx]/Q0[reg,Tidx]-wc[reg,Tidx],fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC, :], + ax4.scatter(Q1[reg, Tidx] / Q0[reg, Tidx] - wc[reg, Tidx], fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], s=marker_size, clip_on=True, zorder=3, marker="^") elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax4.scatter(temp_bin_center[Tidx]-wc[reg, Tidx], fig_params['f4'][1][1]*0.83, - edgecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:]/2, - facecolor=scatter_colors[(Tidx-TEMP_MIN_obs)%NoC,:], - s=marker_size,clip_on=True,zorder=3,marker="^") + ax4.scatter(temp_bin_center[Tidx] - wc[reg, Tidx], fig_params['f4'][1][1] * 0.83, + edgecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :] / 2, + facecolor=scatter_colors[(Tidx - TEMP_MIN_obs) % NoC, :], + s=marker_size, clip_on=True, zorder=3, marker="^") ax4.set_xlabel(fig_params['f4'][2], fontsize=axes_fontsize) ax4.set_ylabel(fig_params['f4'][3], fontsize=axes_fontsize) - ax4.text(0.05, 0.95, "Precip > "+str(PT)+" mm hr$^-$$^1$", + ax4.text(0.05, 0.95, "Precip > " + str(PT) + " mm hr$^-$$^1$", transform=ax4.transAxes, fontsize=12, verticalalignment="top") ax4.grid() ax4.grid(visible=True, which='minor', color='0.8', linestyle='-') ax4.set_axisbelow(True) - if reg==0: - ax4.text(s='PDF of CWV for Precip.>'+str(PT)+'mm/hr', x=0.49, y=1.05, transform=ax4.transAxes, fontsize=12, + if reg == 0: + ax4.text(s='PDF of CWV for Precip.>' + str(PT) + 'mm/hr', x=0.49, y=1.05, transform=ax4.transAxes, + fontsize=12, ha='center', va='bottom') - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: - temp_str='$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)' \ - ' used as the bulk tropospheric temperature measure' - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - temp_str='$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ - ' used as the bulk tropospheric temperature measure' + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: + temp_str = '$\widehat{T}$ (1000-200hPa Mass-weighted Column Average Temperature)' \ + ' used as the bulk tropospheric temperature measure' + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + temp_str = '$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' \ + ' used as the bulk tropospheric temperature measure' fig_cts.text(s=temp_str, x=0, y=0, ha='left', va='top', transform=fig_cts.transFigure, fontsize=12) triag_qsat_str = '$\Delta$: $\widehat{q_{sat}}-w_c$; $\widehat{q_{sat}}$:' \ @@ -845,53 +853,53 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): # set layout to tight (so that space between figures is minimized) fig_cts.tight_layout() - fig_cts.savefig(FIG_OUTPUT_DIR+"/"+FIG_FILENAME_CTS, bbox_inches="tight") + fig_cts.savefig(FIG_OUTPUT_DIR + "/" + 
FIG_FILENAME_CTS, bbox_inches="tight") - ##### Figure Critical CWV (WC) ##### - fig_wc = mp.figure(figsize=(figsize1/1.5,figsize2/2.6)) + # Figure Critical CWV (WC) ##### + fig_wc = mp.figure(figsize=(figsize1 / 1.5, figsize2 / 2.6)) - fig_wc.suptitle('Critical CWV, Col. Satn., & Critical Col. RH ('+MODEL+')', y=1.02, fontsize=16) + fig_wc.suptitle('Critical CWV, Col. Satn., & Critical Col. RH (' + MODEL + ')', y=1.02, fontsize=16) - reg_color=[-1,-2,-3,0] + reg_color = [-1, -2, -3, 0] # create figure 5: wc - ax1 = fig_wc.add_subplot(1,2,1) + ax1 = fig_wc.add_subplot(1, 2, 1) ax1.set_xlim(fig_params['f5'][0]) ax1.set_ylim(fig_params['f5'][1]) ax1.set_xticks(fig_params['f5'][4]) ax1.set_yticks(fig_params['f5'][5]) ax1.tick_params(labelsize=axes_fontsize) ax1.tick_params(axis="x", pad=10) - ax1.set_aspect(float(fig_params['f5'][0][1]-fig_params['f5'][0][0])/float(fig_params['f5'][1][1] - -fig_params['f5'][1][0])) + ax1.set_aspect(float(fig_params['f5'][0][1] - fig_params['f5'][0][0]) / float(fig_params['f5'][1][1] + - fig_params['f5'][1][0])) if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size != 0: for reg in numpy.arange(NUMBER_OF_REGIONS): - ax1.plot(temp_bin_center_obs,wc_obs[reg,:],'-',color='0.6') - ax1.scatter(temp_bin_center_obs,wc_obs[reg,:],color='0.6',s=marker_size,clip_on=True,zorder=3) - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: + ax1.plot(temp_bin_center_obs, wc_obs[reg, :], '-', color='0.6') + ax1.scatter(temp_bin_center_obs, wc_obs[reg, :], color='0.6', s=marker_size, clip_on=True, zorder=3) + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): warnings.simplefilter("ignore") - ax1.plot(temp_bin_center_obs,Q1_obs[reg,:]/Q0_obs[reg,:],'-',color='0.6') + ax1.plot(temp_bin_center_obs, Q1_obs[reg, :] / Q0_obs[reg, :], '-', color='0.6') for reg in numpy.arange(NUMBER_OF_REGIONS): - ax1.plot(temp_bin_center,wc[reg, :],'-',color=scatter_colors[reg_color[reg],:]) + ax1.plot(temp_bin_center, wc[reg, :], '-', color=scatter_colors[reg_color[reg], :]) if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): warnings.simplefilter("ignore") - ax1.plot(temp_bin_center,Q1[reg,:]/Q0[reg,:],'-',color=scatter_colors[reg_color[reg],:]) + ax1.plot(temp_bin_center, Q1[reg, :] / Q0[reg, :], '-', color=scatter_colors[reg_color[reg], :]) elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: - ax1.plot(temp_bin_center,temp_bin_center,'-',color='0.4') - ax1.scatter(temp_bin_center,wc[reg,:],color=scatter_colors[reg_color[reg], :], - s=marker_size,clip_on=True,zorder=3, label=REGION_STR[reg]) + ax1.plot(temp_bin_center, temp_bin_center, '-', color='0.4') + ax1.scatter(temp_bin_center, wc[reg, :], color=scatter_colors[reg_color[reg], :], + s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) handles, labels = ax1.get_legend_handles_labels() leg = ax1.legend(handles, labels, fontsize=axes_fontsize, bbox_to_anchor=(0.05, 0.95), bbox_transform=ax1.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.5, - fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size != 0: ax1.text(0.3, 0.2, OBS, transform=ax1.transAxes, fontsize=12, fontweight="bold", verticalalignment="top", color='0.6') - ax1.text(0.3, 0.1, RES+"$^{\circ}$", transform=ax1.transAxes, + ax1.text(0.3, 0.1, RES + "$^{\circ}$", transform=ax1.transAxes, fontsize=12, fontweight="bold", verticalalignment="top", 
color='0.6') ax1.set_xlabel(fig_params['f5'][2], fontsize=axes_fontsize) ax1.set_ylabel(fig_params['f5'][3], fontsize=axes_fontsize) @@ -909,40 +917,42 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): ax2.set_yticks(fig_params['f6'][5]) ax2.tick_params(labelsize=axes_fontsize) ax2.tick_params(axis="x", pad=10) - ax2.set_aspect(float(fig_params['f6'][0][1]-fig_params['f5'][0][0])/float(fig_params['f6'][1][1] - -fig_params['f6'][1][0])) - if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size!=0: + ax2.set_aspect(float(fig_params['f6'][0][1] - fig_params['f5'][0][0]) / float(fig_params['f6'][1][1] + - fig_params['f6'][1][0])) + if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size != 0: for reg in numpy.arange(NUMBER_OF_REGIONS): - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): warnings.simplefilter("ignore") - ax2.plot(temp_bin_center_obs, wc_obs[reg,:]/(Q1_obs[reg,:]/Q0_obs[reg,:]),'-',color='0.6') - ax2.scatter(temp_bin_center_obs, wc_obs[reg,:]/(Q1_obs[reg,:]/Q0_obs[reg,:]),color='0.6', - s=marker_size,clip_on=True,zorder=3) - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax2.plot(temp_bin_center_obs,wc_obs[reg,:]/temp_bin_center_obs,'-',color='0.6') - ax2.scatter(temp_bin_center_obs,wc_obs[reg,:]/temp_bin_center_obs,color='0.6', - s=marker_size,clip_on=True,zorder=3) + ax2.plot(temp_bin_center_obs, wc_obs[reg, :] / (Q1_obs[reg, :] / Q0_obs[reg, :]), '-', color='0.6') + ax2.scatter(temp_bin_center_obs, wc_obs[reg, :] / (Q1_obs[reg, :] / Q0_obs[reg, :]), color='0.6', + s=marker_size, clip_on=True, zorder=3) + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax2.plot(temp_bin_center_obs, wc_obs[reg, :] / temp_bin_center_obs, '-', color='0.6') + ax2.scatter(temp_bin_center_obs, wc_obs[reg, :] / temp_bin_center_obs, color='0.6', + s=marker_size, clip_on=True, zorder=3) for reg in numpy.arange(NUMBER_OF_REGIONS): - if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==1: + if BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 1: with warnings.catch_warnings(): - warnings.simplefilter("ignore") - ax2.plot(temp_bin_center ,wc[reg,:]/(Q1[reg, :]/Q0[reg, :]),'-', color=scatter_colors[reg_color[reg], :]) - ax2.scatter(temp_bin_center, wc[reg, :]/(Q1[reg,:]/Q0[reg, :]), color=scatter_colors[reg_color[reg], :], - s=marker_size,clip_on=True,zorder=3,label=REGION_STR[reg]) - elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE==2: - ax2.plot(temp_bin_center, wc[reg, :]/temp_bin_center, '-', color=scatter_colors[reg_color[reg], :]) - ax2.scatter(temp_bin_center, wc[reg, :]/temp_bin_center, color=scatter_colors[reg_color[reg], :], - s=marker_size, clip_on=True,zorder=3, label=REGION_STR[reg]) + warnings.simplefilter("ignore") + ax2.plot(temp_bin_center, wc[reg, :] / (Q1[reg, :] / Q0[reg, :]), '-', + color=scatter_colors[reg_color[reg], :]) + ax2.scatter(temp_bin_center, wc[reg, :] / (Q1[reg, :] / Q0[reg, :]), + color=scatter_colors[reg_color[reg], :], + s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) + elif BULK_TROPOSPHERIC_TEMPERATURE_MEASURE == 2: + ax2.plot(temp_bin_center, wc[reg, :] / temp_bin_center, '-', color=scatter_colors[reg_color[reg], :]) + ax2.scatter(temp_bin_center, wc[reg, :] / temp_bin_center, color=scatter_colors[reg_color[reg], :], + s=marker_size, clip_on=True, zorder=3, label=REGION_STR[reg]) leg = ax2.legend(handles, labels, fontsize=axes_fontsize, bbox_to_anchor=(0.6, 0.95), bbox_transform=ax2.transAxes, loc="upper left", borderaxespad=0, labelspacing=0.5, - 
fancybox=False,scatterpoints=1, framealpha=0, borderpad=0, + fancybox=False, scatterpoints=1, framealpha=0, borderpad=0, handletextpad=0.1, markerscale=1, ncol=1, columnspacing=0.25) if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size != 0: ax2.text(0.15, 0.2, OBS, transform=ax2.transAxes, fontsize=12, fontweight="bold", - verticalalignment="top",color='0.6') + verticalalignment="top", color='0.6') ax2.text(0.15, 0.1, RES + "$^{\circ}$", transform=ax2.transAxes, fontsize=12, fontweight="bold", - verticalalignment="top",color='0.6') + verticalalignment="top", color='0.6') ax2.set_xlabel(fig_params['f6'][2], fontsize=axes_fontsize) ax2.set_ylabel(fig_params['f6'][3], fontsize=axes_fontsize) ax2.grid() @@ -959,7 +969,7 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): footnote_str += ('$\widehat{q_{sat}}$ (1000-200hPa Column-integrated Saturation Specific Humidity)' ' as the bulk tropospheric temperature measure') footnote_str += ('\n$w_c$ estimated by fitting (dashed) the average precip.' - ' pickup curves for the 3 most probable temperature bins') + ' pickup curves for the 3 most probable temperature bins') if OVERLAY_OBS_ON_TOP_OF_MODEL_FIG and p1_obs.size != 0: footnote_str += ('\nCorresponding results from ' + OBS + ' (spatial resolution: ' + RES + '$^{\circ}$) plotted in gray') @@ -967,7 +977,7 @@ def convecTransCriticalCollapse_plot(argsv1,argsv2,argsv3,argsv4,argsv5,argsv6): # set layout to tight (so that space between figures is minimized) fig_wc.tight_layout() fig_wc.savefig(FIG_OUTPUT_DIR + "/" + FIG_FILENAME_WC, bbox_inches="tight") - + print("...Completed!") print(" MODEL Figure saved as " + FIG_OUTPUT_DIR + "/" + FIG_FILENAME_CTS + "!") print(" MODEL Figure saved as " + FIG_OUTPUT_DIR + "/" + FIG_FILENAME_WC + "!") diff --git a/diagnostics/convective_transition_diag/convective_transition_diag.html b/diagnostics/convective_transition_diag/convective_transition_diag.html index 809fd5ad9..e76ac5975 100644 --- a/diagnostics/convective_transition_diag/convective_transition_diag.html +++ b/diagnostics/convective_transition_diag/convective_transition_diag.html @@ -18,7 +18,7 @@

Convective Transition Statistics

- diff --git a/diagnostics/convective_transition_diag/convective_transition_diag_v2.py b/diagnostics/convective_transition_diag/convective_transition_diag_v2.py index 37ed4b381..9661d4b72 100644 --- a/diagnostics/convective_transition_diag/convective_transition_diag_v2.py +++ b/diagnostics/convective_transition_diag/convective_transition_diag_v2.py @@ -76,7 +76,7 @@ else: raise KeyError( 'Unrecognized BULK_TROPOSPHERIC_TEMPERATURE_VAR = {}'.format( - os.environ.get('BULK_TROPOSPHERIC_TEMPERATURE_VAR', '')) + os.environ.get('BULK_TROPOSPHERIC_TEMPERATURE_VAR', '')) ) os.environ["lev_coord"] = 'lev' @@ -89,7 +89,7 @@ os.environ["qsat_int_file"] = os.environ["QSAT_INT_FILE"] # Model output filename convention -os.environ["MODEL_OUTPUT_DIR"] = os.environ["DATADIR"]+"/1hr" +os.environ["MODEL_OUTPUT_DIR"] = os.environ["DATADIR"] + "/1hr" if not os.path.exists(os.environ["MODEL_OUTPUT_DIR"]): os.makedirs(os.environ["MODEL_OUTPUT_DIR"]) @@ -101,15 +101,15 @@ if len(glob.glob(os.environ["prw_file"])) == 0: print("Required Precipitable Water Vapor (CWV) data missing!") missing_file = 1 -if len(glob.glob(os.environ["ta_file"]))==0: +if len(glob.glob(os.environ["ta_file"])) == 0: if ((os.environ["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"] == "2" and - len(glob.glob(os.environ["qsat_int_file"])) == 0) + len(glob.glob(os.environ["qsat_int_file"])) == 0) or (os.environ["BULK_TROPOSPHERIC_TEMPERATURE_MEASURE"] == "1" and - (len(glob.glob(os.environ["qsat_int_file"])) == 0 or len(glob.glob(os.environ["tave_file"])) == 0))): + (len(glob.glob(os.environ["qsat_int_file"])) == 0 or len(glob.glob(os.environ["tave_file"])) == 0))): print("Required Temperature data missing!") - missing_file=1 + missing_file = 1 -if missing_file==1: +if missing_file == 1: print("Convective Transition Diagnostic Package will NOT be executed!") else: @@ -118,11 +118,11 @@ # Convective Transition Basic Statistics # See convecTransBasic.py for detailed info try: - os.system("python " + os.environ["POD_HOME"]+"/" + "convecTransBasic.py") + os.system("python " + os.environ["POD_HOME"] + "/" + "convecTransBasic.py") except OSError as e: print('WARNING', e.errno, e.strerror) print("**************************************************") - print("Convective Transition Basic Statistics (convecTransBasic.py) is NOT Executed as Expected!") + print("Convective Transition Basic Statistics (convecTransBasic.py) is NOT Executed as Expected!") print("**************************************************") # ====================================================================== diff --git a/diagnostics/example_multicase/container_cat.csv b/diagnostics/example_multicase/container_cat.csv new file mode 100644 index 000000000..d3db31f86 --- /dev/null +++ b/diagnostics/example_multicase/container_cat.csv @@ -0,0 +1,3 @@ +activity_id,branch_method,branch_time_in_child,branch_time_in_parent,experiment,experiment_id,frequency,grid,grid_label,institution_id,nominal_resolution,parent_activity_id,parent_experiment_id,parent_source_id,parent_time_units,parent_variant_label,product,realm,source_id,source_type,sub_experiment,sub_experiment_id,table_id,variable_id,variant_label,member_id,standard_name,long_name,units,vertical_levels,init_year,start_time,end_time,time_range,path,version +CMIP,standard,,,,synthetic,day,,gr,,,CMIP,,,days since 1980-01-01,r1i1p1f1,,atmos,,,none,none,day,tas,r1i1p1f1,r1i1p1f1,air_temperature,Near-Surface Air 
Temperature,K,1,,1980-01-01,1984-12-31,1980-01-01-1984-12-31,/proj/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19800101-19841231/day/CMIP_Synthetic_r1i1p1f1_gr1_19800101-19841231.tas.day.nc,none +CMIP,standard,,,,synthetic,day,,gr,,,CMIP,,,days since 1985-01-01,r1i1p1f1,,atmos,,,none,none,day,tas,r1i1p1f1,r1i1p1f1,air_temperature,Near-Surface Air Temperature,K,1,,1985-01-01,1989-12-31,1985-01-01-1989-12-31,/proj/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19850101-19891231/day/CMIP_Synthetic_r1i1p1f1_gr1_19850101-19891231.tas.day.nc,none diff --git a/diagnostics/example_multicase/container_cat.json b/diagnostics/example_multicase/container_cat.json new file mode 100644 index 000000000..6917f854e --- /dev/null +++ b/diagnostics/example_multicase/container_cat.json @@ -0,0 +1,181 @@ +{ + "esmcat_version": "0.0.1", + "attributes": [ + { + "column_name": "activity_id", + "vocabulary": "" + }, + { + "column_name": "branch_method", + "vocabulary": "" + }, + { + "column_name": "branch_time_in_child", + "vocabulary": "" + }, + { + "column_name": "branch_time_in_parent", + "vocabulary": "" + }, + { + "column_name": "experiment", + "vocabulary": "" + }, + { + "column_name": "experiment_id", + "vocabulary": "" + }, + { + "column_name": "frequency", + "vocabulary": "" + }, + { + "column_name": "grid", + "vocabulary": "" + }, + { + "column_name": "grid_label", + "vocabulary": "" + }, + { + "column_name": "institution_id", + "vocabulary": "" + }, + { + "column_name": "nominal_resolution", + "vocabulary": "" + }, + { + "column_name": "parent_activity_id", + "vocabulary": "" + }, + { + "column_name": "parent_experiment_id", + "vocabulary": "" + }, + { + "column_name": "parent_source_id", + "vocabulary": "" + }, + { + "column_name": "parent_time_units", + "vocabulary": "" + }, + { + "column_name": "parent_variant_label", + "vocabulary": "" + }, + { + "column_name": "product", + "vocabulary": "" + }, + { + "column_name": "realm", + "vocabulary": "" + }, + { + "column_name": "source_id", + "vocabulary": "" + }, + { + "column_name": "source_type", + "vocabulary": "" + }, + { + "column_name": "sub_experiment", + "vocabulary": "" + }, + { + "column_name": "sub_experiment_id", + "vocabulary": "" + }, + { + "column_name": "table_id", + "vocabulary": "" + }, + { + "column_name": "variable_id", + "vocabulary": "" + }, + { + "column_name": "variant_label", + "vocabulary": "" + }, + { + "column_name": "member_id", + "vocabulary": "" + }, + { + "column_name": "standard_name", + "vocabulary": "" + }, + { + "column_name": "long_name", + "vocabulary": "" + }, + { + "column_name": "units", + "vocabulary": "" + }, + { + "column_name": "vertical_levels", + "vocabulary": "" + }, + { + "column_name": "init_year", + "vocabulary": "" + }, + { + "column_name": "start_time", + "vocabulary": "" + }, + { + "column_name": "end_time", + "vocabulary": "" + }, + { + "column_name": "time_range", + "vocabulary": "" + }, + { + "column_name": "path", + "vocabulary": "" + }, + { + "column_name": "version", + "vocabulary": "" + } + ], + "assets": { + "column_name": "path", + "format": "netcdf", + "format_column_name": null + }, + "aggregation_control": { + "variable_column_name": "variable_id", + "groupby_attrs": [ + "activity_id", + "institution_id", + "source_id", + "experiment_id", + "frequency", + "member_id", + "table_id", + "grid_label", + "realm", + "variant_label" + ], + "aggregations": [ + { + "type": "union", + "attribute_name": "variable_id", + "options": {} + } + ] + }, + "id": 
"esm_catalog_CMIP_synthetic_r1i1p1f1_gr1.csv", + "description": null, + "title": null, + "last_updated": "2023-06-01", + "catalog_file": "file:/proj/MDTF-diagnostics/diagnostics/example_multicase/container_cat.csv" +} diff --git a/diagnostics/example_multicase/container_config_demo.jsonc b/diagnostics/example_multicase/container_config_demo.jsonc new file mode 100644 index 000000000..081896643 --- /dev/null +++ b/diagnostics/example_multicase/container_config_demo.jsonc @@ -0,0 +1,117 @@ +// This a template for configuring MDTF to run PODs that analyze multi-run/ensemble data +// +// Copy this file, rename it, and customize the settings as needed +// Pass your file to the framework using the -f/--input-file flag. +// Any other explicit command line options will override what's listed here. +// +// All text to the right of an unquoted "//" is a comment and ignored, as well +// as blank lines (JSONC quasi-standard.) +// +// Remove your test config file, or any changes you make to this template if you do not rename it, +// from your remote repository before you submit a PR for review. +// To generate CMIP synthetic data in the example dataset, run the following: +// > mamba env create --force -q -f ./src/conda/_env_synthetic_data.yml +// > conda activate _MDTF_synthetic_data +// > pip install mdtf-test-data +// > cd /mdtf +// > mkdir mdtf_test_data && cd mdtf_test_data +// > mdtf_synthetic.py -c CMIP --startyear 1980 --nyears 5 +// > mdtf_synthetic.py -c CMIP --startyear 1985 --nyears 5 +// Note that MODEL_DATA_ROOT assumes that mdtf_test_data is one directory above MDTF-diagnostics +// in this sample config file +{ + // Run each ensemble on the example POD. + // Add other PODs that work on ensemble datasets to the pod_list as needed + "pod_list" : [ + //"example" + "example_multicase" + ], + // Each case corresponds to a different simulation/output dataset + // startdate, enddate: either YYYY-MM-DD, YYYYMMDD:HHMMSS, or YYYY-MM-DD:HHMMSS + "case_list": + { + "CMIP_Synthetic_r1i1p1f1_gr1_19800101-19841231": + { + "model": "test", + "convention": "CMIP", + "startdate": "19800101", + "enddate": "19841231" + } + , + "CMIP_Synthetic_r1i1p1f1_gr1_19850101-19891231": + { + "model": "test", + "convention": "CMIP", + "startdate": "19850101", + "enddate": "19891231" + } + }, + // PATHS --------------------------------------------------------------------- + // Location of supporting data downloaded when the framework was installed. + // If a relative path is given, it's resolved relative to the MDTF-diagnostics + // code directory. Environment variables (eg, $HOME) can be referenced with a + // "$" and will be expended to their current values when the framework runs. + // Full or relative path to model data ESM-intake catalog header file + + "DATA_CATALOG": "/proj/MDTF-diagnostics/diagnostics/example_multicase/container_cat.json", + + // Parent directory containing observational data used by individual PODs. + "OBS_DATA_ROOT": "../inputdata/obs_data", + + // Working directory. + "WORK_DIR": "/proj/wkdir", + + // Directory to write output. The results of each run of the framework will be + // put in a subdirectory of this directory. Defaults to WORKING_DIR if blank. + "OUTPUT_DIR": "/proj/wkdir", + + // Location of the Anaconda/miniconda or micromamba installation to use for managing + // dependencies (path returned by running `conda info --base` or `micromamba info`.) + "conda_root": "/opt/conda", + + // Directory containing the framework-specific conda environments. 
This should + // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left + // blank, the framework will look for its environments in conda_root/envs + "conda_env_root": "/opt/conda/envs", + + // Location of the micromamba executable. Required if using micromamba + "micromamba_exe": "/bin/micromaba", + + // SETTINGS ------------------------------------------------------------------ + // Any command-line option recognized by the mdtf script (type `mdtf --help`) + // can be set here, in the form "flag name": "desired setting". + + // Settings affecting what output is generated: + // Set to true to run the preprocessor; default true: + "run_pp": true, + // Set to true to perform data translation; default false: + "translate_data": true, + // Set to true to have PODs save postscript figures in addition to bitmaps. + "save_ps": false, + + // Set to true for files > 4 GB + "large_file": false, + + // If true, leave pp data in OUTPUT_DIR after preprocessing; if false, delete pp data after PODs + // run to completion + "save_pp_data": true, + + // Set to true to save HTML and bitmap plots in a .tar file. + "make_variab_tar": false, + + // Generate html output for multiple figures per case + "make_multicase_figure_html": false, + + // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved + // under a unique name. + "overwrite": false, + + // List with custom preprocessing script(s) to run on data + // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository + "user_pp_scripts" : [], + + // Settings used in debugging: + + // Log verbosity level. + "verbose": 1 +} diff --git a/diagnostics/example_multicase/multirun_config_template.jsonc b/diagnostics/example_multicase/multirun_config_template.jsonc index 8c640f4ff..d1e628e6f 100644 --- a/diagnostics/example_multicase/multirun_config_template.jsonc +++ b/diagnostics/example_multicase/multirun_config_template.jsonc @@ -110,5 +110,5 @@ "overwrite": false, // List with custom preprocessing script(s) to run on data // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository - "user_pp_scripts" : ["example_pp_script.py"] + "user_pp_scripts" : [] } diff --git a/diagnostics/forcing_feedback/settings.jsonc b/diagnostics/forcing_feedback/settings.jsonc index 97d14ba52..b89b1726b 100644 --- a/diagnostics/forcing_feedback/settings.jsonc +++ b/diagnostics/forcing_feedback/settings.jsonc @@ -21,7 +21,7 @@ "max_frequency": "mon", "min_duration": "5yr", "max_duration": "any", - "realm" : "atmos", + "realm" : "atmos" }, "dimensions": { "lat": { @@ -46,75 +46,63 @@ "ts": { "standard_name": "surface_temperature", "units": "K", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "ta": { "standard_name": "air_temperature", "units": "K", - "dimensions" : ["time", "plev", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "plev", "lat", "lon"] }, "hus": { "standard_name": "specific_humidity", "units": "1", - "dimensions" : ["time", "plev", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "plev", "lat", "lon"] }, "rsus": { "standard_name": "surface_upwelling_shortwave_flux_in_air", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rsuscs": { "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] 
}, "rsds": { "standard_name": "surface_downwelling_shortwave_flux_in_air", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rsdscs": { "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rsdt": { "standard_name": "toa_incoming_shortwave_flux", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rsut": { "standard_name": "toa_outgoing_shortwave_flux", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rsutcs": { "standard_name": "toa_outgoing_shortwave_flux_assuming_clear_sky", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rlut": { "standard_name": "toa_outgoing_longwave_flux", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] }, "rlutcs": { "standard_name": "toa_outgoing_longwave_flux_assuming_clear_sky", "units": "W m-2", - "dimensions" : ["time", "lat", "lon"], - "freq": "mon" + "dimensions" : ["time", "lat", "lon"] } } } diff --git a/diagnostics/seaice_suite/seaice_suite_sic_mean_sigma.py b/diagnostics/seaice_suite/seaice_suite_sic_mean_sigma.py index 043581b0b..6ef11127f 100644 --- a/diagnostics/seaice_suite/seaice_suite_sic_mean_sigma.py +++ b/diagnostics/seaice_suite/seaice_suite_sic_mean_sigma.py @@ -91,7 +91,7 @@ def readindata(file, varname='siconc', firstyr='1979', lastyr='2014'): # 1) Loading model data files: -input_file = "{DATADIR}/mon/{CASENAME}.{siconc_var}.mon.nc".format(**os.environ) +input_file = os.environ['SICONC_FILE'] obsoutput_dir = "{WORK_DIR}/obs/".format(**os.environ) modoutput_dir = "{WORK_DIR}/model/".format(**os.environ) figures_dir = "{WORK_DIR}/model/".format(**os.environ) diff --git a/diagnostics/stc_eddy_heat_fluxes/settings.jsonc b/diagnostics/stc_eddy_heat_fluxes/settings.jsonc index 3cf0e6136..7b5eb081f 100644 --- a/diagnostics/stc_eddy_heat_fluxes/settings.jsonc +++ b/diagnostics/stc_eddy_heat_fluxes/settings.jsonc @@ -27,9 +27,6 @@ "python3": ["matplotlib", "numpy", "pandas", "xarray", "xesmf"] } }, - "data": { - "realm" : "atmos" - }, "dimensions": { "lat": { diff --git a/diagnostics/tropical_pacific_sea_level/settings.jsonc b/diagnostics/tropical_pacific_sea_level/settings.jsonc index 726fad155..540b7b426 100644 --- a/diagnostics/tropical_pacific_sea_level/settings.jsonc +++ b/diagnostics/tropical_pacific_sea_level/settings.jsonc @@ -68,7 +68,6 @@ "standard_name": "cell_area", "realm": "ocean", "units": "m2", - "modifier" : "ocean_realm", "dimensions" : ["lat", "lon"] } } diff --git a/doc/_static/MDTF_Variable_Lists.html b/doc/_static/MDTF_Variable_Lists.html new file mode 100644 index 000000000..d18fe3206 --- /dev/null +++ b/doc/_static/MDTF_Variable_Lists.html @@ -0,0 +1,2694 @@ + + + + + + + MDTF PODs Variable Lists (Tables For Individual POD and Combined All PODs) + + + +

MDTF PODs Variable Lists (Tables For Individual POD and Combined All PODs)

Generated at 2024/11/01 15:52:40. For questions, contact Wenhao.Dong@noaa.gov

POD: ENSO_MSE

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zg | m | atmos | time, lev, lat, lon | mon | geopotential_height | cesm
ua | m s-1 | atmos | time, lev, lat, lon | mon | eastward_wind | cesm
va | m s-1 | atmos | time, lev, lat, lon | mon | northward_wind | cesm
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | cesm
hus | 1 | atmos | time, lev, lat, lon | mon | specific_humidity | cesm
wap | Pa s-1 | atmos | time, lev, lat, lon | mon | lagrangian_tendency_of_air_pressure | cesm
pr | kg m-2 s-1 | atmos | time, lat, lon | mon | precipitation_flux | cesm
ts | K | atmos | time, lat, lon | mon | surface_temperature | cesm
hfss | W m-2 | atmos | time, lat, lon | mon | surface_upward_sensible_heat_flux | cesm
hfls | W m-2 | atmos | time, lat, lon | mon | surface_upward_latent_heat_flux | cesm
rsus | W m-2 | atmos | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air | cesm
rsds | W m-2 | atmos | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air | cesm
rsdt | W m-2 | atmos | time, lat, lon | mon | toa_incoming_shortwave_flux | cesm
rsut | W m-2 | atmos | time, lat, lon | mon | toa_outgoing_shortwave_flux | cesm
rlus | W m-2 | atmos | time, lat, lon | mon | surface_upwelling_longwave_flux_in_air | cesm
rlds | W m-2 | atmos | time, lat, lon | mon | surface_downwelling_longwave_flux_in_air | cesm
rlut | W m-2 | atmos | time, lat, lon | mon | toa_outgoing_longwave_flux | cesm

POD: ENSO_RWS

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zg | m | atmos | time, lev, lat, lon | mon | geopotential_height | cesm
ua | m s-1 | atmos | time, lev, lat, lon | mon | eastward_wind | cesm
va | m s-1 | atmos | time, lev, lat, lon | mon | northward_wind | cesm
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | cesm
wap | Pa s-1 | atmos | time, lev, lat, lon | mon | lagrangian_tendency_of_air_pressure | cesm
pr | kg m-2 s-1 | atmos | time, lat, lon | mon | precipitation_flux | cesm
ts | K | atmos | time, lat, lon | mon | surface_temperature | cesm

POD: EOF_500hPa

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zg | m | atmos | time, lat, lon | mon | geopotential_height | cesm
zg_hybrid_sigma | m | atmos | time, lev, lat, lon | mon | geopotential_height | cesm
ps | Pa | atmos | time, lat, lon | mon | surface_air_pressure | cesm

POD: MJO_prop_amp

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
pr | kg m-2 s-1 | atmos | time, lat, lon | day | precipitation_flux | cesm
prw | kg m-2 | atmos | time, lat, lon | day | atmosphere_mass_content_of_water_vapor | cesm
hus | 1 | atmos | time, lev, lat, lon | day | specific_humidity | cesm

POD: MJO_suite

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
rlut | W m-2 | atmos | time, lat, lon | day | toa_outgoing_longwave_flux | cesm
pr | m s-1 | atmos | time, lat, lon | day | precipitation_rate | cesm
u200 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm
u850 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm
v200 | m s-1 | atmos | time, lat, lon | day | northward_wind | cesm
v850 | m s-1 | atmos | time, lat, lon | day | northward_wind | cesm

POD: MJO_teleconnection

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
rlut | W m-2 | atmos | time, lat, lon | day | toa_outgoing_longwave_flux | cesm
pr | kg m-2 s-1 | atmos | time, lat, lon | day | precipitation_flux | cesm
u250 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm
u850 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm
z250 | m | atmos | time, lat, lon | day | geopotential_height | cesm

POD: SM_ET_coupling

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
mrsos | kg m-2 | land | time, lat, lon | mon | mass_content_of_water_in_soil_layer | cmip
evspsbl | kg m-2 s-1 | land | time, lat, lon | mon | water_evapotranspiration_flux | cmip
pr | kg m-2 s-1 | atmos | time, lat, lon | mon | precipitation_flux | cmip

POD: TC_MSE

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
ta | K | atmos | time, plev, lat, lon | 6hr | air_temperature | cmip
zg | m | atmos | time, plev, lat, lon | 6hr | geopotential_height | cmip
hus | 1 | atmos | time, plev, lat, lon | 6hr | specific_humidity | cmip
hfss | W m-2 | atmos | time, lat, lon | 6hr | surface_upward_sensible_heat_flux | cmip
hfls | W m-2 | atmos | time, lat, lon | 6hr | surface_upward_latent_heat_flux | cmip
rlds | W m-2 | atmos | time, lat, lon | 6hr | surface_downwelling_longwave_flux_in_air | cmip
rlus | W m-2 | atmos | time, lat, lon | 6hr | surface_upwelling_longwave_flux_in_air | cmip
rlut | W m-2 | atmos | time, lat, lon | 6hr | toa_outgoing_longwave_flux | cmip
rsds | W m-2 | atmos | time, lat, lon | 6hr | surface_downwelling_shortwave_flux_in_air | cmip
rsdt | W m-2 | atmos | time, lat, lon | 6hr | toa_incoming_shortwave_flux | cmip
rsus | W m-2 | atmos | time, lat, lon | 6hr | surface_upwelling_shortwave_flux_in_air | cmip
rsut | W m-2 | atmos | time, lat, lon | 6hr | toa_outgoing_shortwave_flux | cmip

POD: Wheeler_Kiladis

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
rlut | W m-2 | atmos | time, lat, lon | day | toa_outgoing_longwave_flux | cesm
pr | m s-1 | atmos | time, lat, lon | day | precipitation_rate | cesm
omega500 | Pa s-1 | atmos | time, lat, lon | day | lagrangian_tendency_of_air_pressure | cesm
u200 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm
u850 | m s-1 | atmos | time, lat, lon | day | eastward_wind | cesm

POD: albedofb

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
areacella | m2 | atmos | lat, lon | N/A | cell_area | cmip
tas | K | atmos | time, lat, lon | mon | air_temperature | cmip
rsdt | W m-2 | atmos | time, lat, lon | mon | toa_incoming_shortwave_flux | cmip
rsds | W m-2 | atmos | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air | cmip
rsut | W m-2 | atmos | time, lat, lon | mon | toa_outgoing_shortwave_flux | cmip
rsus | W m-2 | atmos | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air | cmip

POD: blocking_neale

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zg | m | atmos | time, lat, lon | day | geopotential_height | cesm

POD: convective_transition_diag

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
pr | kg m-2 s-1 | atmos | time, lat, lon | 1hr | precipitation_flux | cmip
prw | kg m-2 | atmos | time, lat, lon | 1hr | atmosphere_mass_content_of_water_vapor | cmip
tave | K | atmos | time, lat, lon | 1hr | mass_weighted_column_average_temperature | cmip
qsat_int | kg m-2 | atmos | time, lat, lon | 1hr | column_integrated_saturation_humidity | cmip
ta | K | atmos | time, lev, lat, lon | 1hr | air_temperature | cmip

POD: eulerian_storm_track

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
v850 | m s-1 | atmos | time, lat, lon | 6hr | northward_wind | cmip

POD: example

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
tas | K | atmos | time, lat, lon | day | air_temperature | cmip

POD: example_multicase

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
tas | K | atmos | time, lat, lon | day | air_temperature | cmip

POD: example_notebook

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
tas | K | atmos | time, lat, lon | day | air_temperature | cmip

POD: forcing_feedback

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
ts | K | N/A | time, lat, lon | mon | surface_temperature | cmip
ta | K | N/A | time, plev, lat, lon | mon | air_temperature | cmip
hus | 1 | N/A | time, plev, lat, lon | mon | specific_humidity | cmip
rsus | W m-2 | N/A | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air | cmip
rsuscs | W m-2 | N/A | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air_assuming_clear_sky | cmip
rsds | W m-2 | N/A | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air | cmip
rsdscs | W m-2 | N/A | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air_assuming_clear_sky | cmip
rsdt | W m-2 | N/A | time, lat, lon | mon | toa_incoming_shortwave_flux | cmip
rsut | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_shortwave_flux | cmip
rsutcs | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_shortwave_flux_assuming_clear_sky | cmip
rlut | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_longwave_flux | cmip
rlutcs | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_longwave_flux_assuming_clear_sky | cmip

POD: mixed_layer_depth

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
so | psu | ocean | time, lev, lat, lon | mon | sea_water_salinity | cmip
thetao | degC | ocean | time, lev, lat, lon | mon | sea_water_potential_temperature | cmip

POD: ocn_surf_flux_diag

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
ts | K | atmos | time, lat, lon | day | surface_temperature | cmip
psl | Pa | atmos | time, lat, lon | day | air_pressure_at_mean_sea_level | cmip
sfcWind | m s-1 | atmos | time, lat, lon | day | wind_speed | cmip
huss | 1 | atmos | time, lat, lon | day | specific_humidity | cmip
hfls | W m-2 | atmos | time, lat, lon | day | surface_upward_latent_heat_flux | cmip
pr | kg m-2 s-1 | atmos | time, lat, lon | day | precipitation_flux | cmip

POD: precip_buoy_diag

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
pr | kg m-2 s-1 | atmos | time, lat, lon | N/A | precipitation_flux | cmip
ta | K | atmos | time, lev, lat, lon | N/A | air_temperature | cmip
qa | kg/kg | atmos | time, lev, lat, lon | N/A | specific_humidity | cmip
ps | Pa | atmos | time, lat, lon | N/A | surface_air_pressure | cmip

POD: precip_diurnal_cycle

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
pr | kg m-2 s-1 | atmos | time, lat, lon | 3hr | precipitation_flux | cmip

POD: seaice_suite

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
siconc | % | seaIce | time, lat, lon | mon | sea_ice_area_fraction | cmip

POD: stc_annular_modes

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zg | m | atmos | time, lev, lat | day | geopotential_height | cmip

POD: stc_eddy_heat_fluxes

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
v100 | m s-1 | atmos | time, lat, lon | mon | northward_wind | cmip
t100 | K | atmos | time, lat, lon | mon | air_temperature | cmip
t50 | K | atmos | time, lat, lon | mon | air_temperature | cmip
va | m s-1 | atmos | time, lev, lat, lon | mon | northward_wind | cmip
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | cmip
zg | m | atmos | time, lev, lat, lon | mon | geopotential_height | cmip

POD: stc_ozone

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
ua | m s-1 | atmos | time, lev, lat, lon | mon | eastward_wind | cmip
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | cmip
o3 | mol mol-1 | aerosol | time, lev, lat, lon | mon | mole_fraction_of_ozone_in_air | cmip

POD: stc_qbo_enso

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
tos | degC | ocean | time, lat, lon | mon | sea_surface_temperature | N/A
ua | m s-1 | atmos | time, lev, lat, lon | mon | eastward_wind | N/A
va | m s-1 | atmos | time, lev, lat, lon | mon | northward_wind | N/A
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | N/A
psl | Pa | atmos | time, lat, lon | mon | air_pressure_at_mean_sea_level | N/A

POD: stc_spv_extremes

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
ua | m s-1 | N/A | time, plev, lat | day | eastward_wind | N/A
zg | m | N/A | time, plev, lat | day | geopotential_height | N/A
zg500 | m | N/A | time, lat, lon | day | geopotential_height | N/A
tas | K | N/A | time, lat, lon | day | air_temperature | N/A

POD: stc_vert_wave_coupling

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
va50 | m s-1 | atmos | time, lat, lon | day | northward_wind | cmip
ta50 | K | atmos | time, lat, lon | day | air_temperature | cmip
zg10 | m | atmos | time, lat, lon | day | geopotential_height | cmip
zg500 | m | atmos | time, lat, lon | day | geopotential_height | cmip

POD: temp_extremes_distshape

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
tas | K | atmos | time, lat, lon | day | air_temperature | cmip
zg | m | atmos | time, lat, lon | day | geopotential_height | cmip
psl | Pa | atmos | time, lat, lon | day | air_pressure_at_mean_sea_level | cmip

POD: top_heaviness_metric

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
omega | Pa s-1 | atmos | lev, lat, lon | mon | lagrangian_tendency_of_air_pressure | cmip

POD: tropical_pacific_sea_level

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Convention
zos | m | ocean | time, lat, lon | mon | sea_surface_height_above_geoid | cmip
tauuo | N m-2 | ocean | time, lat, lon | mon | downward_x_stress_at_sea_water_surface | cmip
tauvo | N m-2 | ocean | time, lat, lon | mon | downward_y_stress_at_sea_water_surface | cmip
areacello | m2 | ocean | lat, lon | mon | cell_area | cmip

All PODs

Variable | Units | Realm | Dimensions | Frequency | Standard Name | Used by
areacella | m2 | atmos | lat, lon | N/A | cell_area | albedofb
areacello | m2 | ocean | lat, lon | mon | cell_area | tropical_pacific_sea_level
evspsbl | kg m-2 s-1 | land | time, lat, lon | mon | water_evapotranspiration_flux | SM_ET_coupling
hfls | W m-2 | atmos | time, lat, lon | 6hr | surface_upward_latent_heat_flux | TC_MSE
hfls | W m-2 | atmos | time, lat, lon | day | surface_upward_latent_heat_flux | ocn_surf_flux_diag
hfls | W m-2 | atmos | time, lat, lon | mon | surface_upward_latent_heat_flux | ENSO_MSE
hfss | W m-2 | atmos | time, lat, lon | 6hr | surface_upward_sensible_heat_flux | TC_MSE
hfss | W m-2 | atmos | time, lat, lon | mon | surface_upward_sensible_heat_flux | ENSO_MSE
hus | 1 | N/A | time, plev, lat, lon | mon | specific_humidity | forcing_feedback
hus | 1 | atmos | time, lev, lat, lon | day | specific_humidity | MJO_prop_amp
hus | 1 | atmos | time, lev, lat, lon | mon | specific_humidity | ENSO_MSE
hus | 1 | atmos | time, plev, lat, lon | 6hr | specific_humidity | TC_MSE
huss | 1 | atmos | time, lat, lon | day | specific_humidity | ocn_surf_flux_diag
mrsos | kg m-2 | land | time, lat, lon | mon | mass_content_of_water_in_soil_layer | SM_ET_coupling
o3 | mol mol-1 | aerosol | time, lev, lat, lon | mon | mole_fraction_of_ozone_in_air | stc_ozone
omega | Pa s-1 | atmos | lev, lat, lon | mon | lagrangian_tendency_of_air_pressure | top_heaviness_metric
omega500 | Pa s-1 | atmos | time, lat, lon | day | lagrangian_tendency_of_air_pressure | Wheeler_Kiladis
pr | kg m-2 s-1 | atmos | time, lat, lon | 1hr | precipitation_flux | convective_transition_diag
pr | kg m-2 s-1 | atmos | time, lat, lon | 3hr | precipitation_flux | precip_diurnal_cycle
pr | kg m-2 s-1 | atmos | time, lat, lon | N/A | precipitation_flux | precip_buoy_diag
pr | kg m-2 s-1 | atmos | time, lat, lon | day | precipitation_flux | MJO_prop_amp, MJO_teleconnection, ocn_surf_flux_diag
pr | kg m-2 s-1 | atmos | time, lat, lon | mon | precipitation_flux | ENSO_MSE, ENSO_RWS, SM_ET_coupling
pr | m s-1 | atmos | time, lat, lon | day | precipitation_rate | MJO_suite, Wheeler_Kiladis
prw | kg m-2 | atmos | time, lat, lon | 1hr | atmosphere_mass_content_of_water_vapor | convective_transition_diag
prw | kg m-2 | atmos | time, lat, lon | day | atmosphere_mass_content_of_water_vapor | MJO_prop_amp
ps | Pa | atmos | time, lat, lon | N/A | surface_air_pressure | precip_buoy_diag
ps | Pa | atmos | time, lat, lon | mon | surface_air_pressure | EOF_500hPa
psl | Pa | atmos | time, lat, lon | day | air_pressure_at_mean_sea_level | ocn_surf_flux_diag, temp_extremes_distshape
psl | Pa | atmos | time, lat, lon | mon | air_pressure_at_mean_sea_level | stc_qbo_enso
qa | kg/kg | atmos | time, lev, lat, lon | N/A | specific_humidity | precip_buoy_diag
qsat_int | kg m-2 | atmos | time, lat, lon | 1hr | column_integrated_saturation_humidity | convective_transition_diag
rlds | W m-2 | atmos | time, lat, lon | 6hr | surface_downwelling_longwave_flux_in_air | TC_MSE
rlds | W m-2 | atmos | time, lat, lon | mon | surface_downwelling_longwave_flux_in_air | ENSO_MSE
rlus | W m-2 | atmos | time, lat, lon | 6hr | surface_upwelling_longwave_flux_in_air | TC_MSE
rlus | W m-2 | atmos | time, lat, lon | mon | surface_upwelling_longwave_flux_in_air | ENSO_MSE
rlut | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_longwave_flux | forcing_feedback
rlut | W m-2 | atmos | time, lat, lon | 6hr | toa_outgoing_longwave_flux | TC_MSE
rlut | W m-2 | atmos | time, lat, lon | day | toa_outgoing_longwave_flux | MJO_suite, MJO_teleconnection, Wheeler_Kiladis
rlut | W m-2 | atmos | time, lat, lon | mon | toa_outgoing_longwave_flux | ENSO_MSE
rlutcs | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_longwave_flux_assuming_clear_sky | forcing_feedback
rsds | W m-2 | N/A | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air | forcing_feedback
rsds | W m-2 | atmos | time, lat, lon | 6hr | surface_downwelling_shortwave_flux_in_air | TC_MSE
rsds | W m-2 | atmos | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air | ENSO_MSE, albedofb
rsdscs | W m-2 | N/A | time, lat, lon | mon | surface_downwelling_shortwave_flux_in_air_assuming_clear_sky | forcing_feedback
rsdt | W m-2 | N/A | time, lat, lon | mon | toa_incoming_shortwave_flux | forcing_feedback
rsdt | W m-2 | atmos | time, lat, lon | 6hr | toa_incoming_shortwave_flux | TC_MSE
rsdt | W m-2 | atmos | time, lat, lon | mon | toa_incoming_shortwave_flux | ENSO_MSE, albedofb
rsus | W m-2 | N/A | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air | forcing_feedback
rsus | W m-2 | atmos | time, lat, lon | 6hr | surface_upwelling_shortwave_flux_in_air | TC_MSE
rsus | W m-2 | atmos | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air | ENSO_MSE, albedofb
rsuscs | W m-2 | N/A | time, lat, lon | mon | surface_upwelling_shortwave_flux_in_air_assuming_clear_sky | forcing_feedback
rsut | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_shortwave_flux | forcing_feedback
rsut | W m-2 | atmos | time, lat, lon | 6hr | toa_outgoing_shortwave_flux | TC_MSE
rsut | W m-2 | atmos | time, lat, lon | mon | toa_outgoing_shortwave_flux | ENSO_MSE, albedofb
rsutcs | W m-2 | N/A | time, lat, lon | mon | toa_outgoing_shortwave_flux_assuming_clear_sky | forcing_feedback
sfcWind | m s-1 | atmos | time, lat, lon | day | wind_speed | ocn_surf_flux_diag
siconc | % | seaIce | time, lat, lon | mon | sea_ice_area_fraction | seaice_suite
so | psu | ocean | time, lev, lat, lon | mon | sea_water_salinity | mixed_layer_depth
t100 | K | atmos | time, lat, lon | mon | air_temperature | stc_eddy_heat_fluxes
t50 | K | atmos | time, lat, lon | mon | air_temperature | stc_eddy_heat_fluxes
ta | K | N/A | time, plev, lat, lon | mon | air_temperature | forcing_feedback
ta | K | atmos | time, lev, lat, lon | 1hr | air_temperature | convective_transition_diag
ta | K | atmos | time, lev, lat, lon | N/A | air_temperature | precip_buoy_diag
ta | K | atmos | time, lev, lat, lon | mon | air_temperature | ENSO_MSE, ENSO_RWS, stc_eddy_heat_fluxes, stc_ozone, stc_qbo_enso
ta | K | atmos | time, plev, lat, lon | 6hr | air_temperature | TC_MSE
ta50 | K | atmos | time, lat, lon | day | air_temperature | stc_vert_wave_coupling
tas | K | N/A | time, lat, lon | day | air_temperature | stc_spv_extremes
tas | K | atmos | time, lat, lon | day | air_temperature | example, example_multicase, example_notebook, temp_extremes_distshape
tas | K | atmos | time, lat, lon | mon | air_temperature | albedofb
tauuo | N m-2 | ocean | time, lat, lon | mon | downward_x_stress_at_sea_water_surface | tropical_pacific_sea_level
tauvo | N m-2 | ocean | time, lat, lon | mon | downward_y_stress_at_sea_water_surface | tropical_pacific_sea_level
tave | K | atmos | time, lat, lon | 1hr | mass_weighted_column_average_temperature | convective_transition_diag
thetao | degC | ocean | time, lev, lat, lon | mon | sea_water_potential_temperature | mixed_layer_depth
tos | degC | ocean | time, lat, lon | mon | sea_surface_temperature | stc_qbo_enso
ts | K | N/A | time, lat, lon | mon | surface_temperature | forcing_feedback
ts | K | atmos | time, lat, lon | day | surface_temperature | ocn_surf_flux_diag
ts | K | atmos | time, lat, lon | mon | surface_temperature | ENSO_MSE, ENSO_RWS
u200 | m s-1 | atmos | time, lat, lon | day | eastward_wind | MJO_suite, Wheeler_Kiladis
u250 | m s-1 | atmos | time, lat, lon | day | eastward_wind | MJO_teleconnection
u850 | m s-1 | atmos | time, lat, lon | day | eastward_wind | MJO_suite, MJO_teleconnection, Wheeler_Kiladis
ua | m s-1 | N/A | time, plev, lat | day | eastward_wind | stc_spv_extremes
ua | m s-1 | atmos | time, lev, lat, lon | mon | eastward_wind | ENSO_MSE, ENSO_RWS, stc_ozone, stc_qbo_enso
v100 | m s-1 | atmos | time, lat, lon | mon | northward_wind | stc_eddy_heat_fluxes
v200 | m s-1 | atmos | time, lat, lon | day | northward_wind | MJO_suite
v850 | m s-1 | atmos | time, lat, lon | 6hr | northward_wind | eulerian_storm_track
v850 | m s-1 | atmos | time, lat, lon | day | northward_wind | MJO_suite
va | m s-1 | atmos | time, lev, lat, lon | mon | northward_wind | ENSO_MSE, ENSO_RWS, stc_eddy_heat_fluxes, stc_qbo_enso
va50 | m s-1 | atmos | time, lat, lon | day | northward_wind | stc_vert_wave_coupling
wap | Pa s-1 | atmos | time, lev, lat, lon | mon | lagrangian_tendency_of_air_pressure | ENSO_MSE, ENSO_RWS
z250 | m | atmos | time, lat, lon | day | geopotential_height | MJO_teleconnection
zg | m | N/A | time, plev, lat | day | geopotential_height | stc_spv_extremes
zg | m | atmos | time, lat, lon | day | geopotential_height | blocking_neale, temp_extremes_distshape
zg | m | atmos | time, lat, lon | mon | geopotential_height | EOF_500hPa
zg | m | atmos | time, lev, lat | day | geopotential_height | stc_annular_modes
zg | m | atmos | time, lev, lat, lon | mon | geopotential_height | ENSO_MSE, ENSO_RWS, stc_eddy_heat_fluxes
zg | m | atmos | time, plev, lat, lon | 6hr | geopotential_height | TC_MSE
zg10 | m | atmos | time, lat, lon | day | geopotential_height | stc_vert_wave_coupling
zg500 | m | N/A | time, lat, lon | day | geopotential_height | stc_spv_extremes
zg500 | m | atmos | time, lat, lon | day | geopotential_height | stc_vert_wave_coupling
zg_hybrid_sigma | m | atmos | time, lev, lat, lon | mon | geopotential_height | EOF_500hPa
zos | m | ocean | time, lat, lon | mon | sea_surface_height_above_geoid | tropical_pacific_sea_level
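The per-POD requirements tabulated above are what the framework resolves at runtime against an ESM-intake catalog, such as the container_cat.csv/container_cat.json pair added earlier in this patch. As a rough, illustrative sketch (not part of the patch; it assumes intake-esm is installed and that the data paths recorded in container_cat.csv exist), such a catalog can be queried on the same columns the preprocessor query fills in:

   .. code-block:: python

      # Hedged sketch: open the demo ESM-intake catalog and subset it on the
      # columns the framework's query uses (variable_id, frequency,
      # standard_name). The path is the one used in container_config_demo.jsonc.
      import intake

      cat = intake.open_esm_datastore(
          "/proj/MDTF-diagnostics/diagnostics/example_multicase/container_cat.json"
      )
      subset = cat.search(variable_id="tas", frequency="day",
                          standard_name="air_temperature")
      # One xarray.Dataset per group defined by the catalog's groupby_attrs.
      datasets = subset.to_dataset_dict()
      for key, ds in datasets.items():
          print(key, list(ds.data_vars))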
diff --git a/doc/conf.py b/doc/conf.py
index 457c3c44f..a5704b5b3 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -154,7 +154,7 @@ def __init__(self, units=None, calendar=None, formatted=False, names=False,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
 # Sphinx automatically copies referenced image files.
-# html_static_path = ['_static']
+html_static_path = ['_static']
 
 html_logo = 'img/logo_MDTF.png'
diff --git a/doc/sphinx/dev_start.rst b/doc/sphinx/dev_start.rst
index cb2be8c03..f6464a26b 100644
--- a/doc/sphinx/dev_start.rst
+++ b/doc/sphinx/dev_start.rst
@@ -26,6 +26,10 @@ Developers may download the code from GitHub as described in :ref:`ref-download`
 clone the repo in order to keep up with changes in the main branch, and to simplify submitting pull requests
 with your POD's code. Instructions for how to do this are given in :doc:`dev_git_intro`.
 
+Users may also install and run the MDTF-diagnostics Docker container, which includes the pre-built base,
+python3_base, and synthetic_data Conda environments (NCL is not compatible with Docker).
+Further details can be found in :doc:`ref_container`. The container is a new addition; beta testers are
+very much welcome!
+
 Installing dependencies with Conda
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/doc/sphinx/ref_container.rst b/doc/sphinx/ref_container.rst
new file mode 100644
index 000000000..f7efbb428
--- /dev/null
+++ b/doc/sphinx/ref_container.rst
@@ -0,0 +1,89 @@
+.. role:: code-rst(code)
+   :language: reStructuredText
+
+.. _ref-container:
+
+Container Reference
+===============================
+This section provides basic directions for downloading,
+installing, and running the example_multicase POD in the
+Model Diagnostics Task Force (MDTF) container.
+
+Getting the Container
+-------------------------------
+The container assumes that the MDTF-diagnostics GitHub repo is located on your local machine.
+If you have not already, clone the repo to your local machine with:
+
+   .. code-block:: bash
+
+      git clone https://github.com/NOAA-GFDL/MDTF-diagnostics.git
+
+The container can then be pulled from the GitHub
+container registry with the command:
+
+   .. code-block:: bash
+
+      docker pull ghcr.io/noaa-gfdl/mdtf-diagnostics:container
+
+or with the equivalent command in your container software.
+If you do not have container software installed, Docker can be downloaded from `here `_.
+
+Launching the Container
+-------------------------------
+The container itself can be launched with Docker using:
+
+   .. code-block:: bash
+
+      docker run -it -v {DIAG_DIR}:/proj/MDTF-diagnostics/diagnostics/ -v {WKDIR}:/proj/wkdir mdtf
+
+where:
+   * :code-rst:`{DIAG_DIR}` is the path to the diagnostics directory on your local machine.
+     Mounting this volume is not required, but strongly recommended.
+   * :code-rst:`{WKDIR}` is where you would like to store the output on your local machine.
+     This allows the output HTML to be reachable without having to open a port to the container.
+
+These are the only required volumes; additional volumes, such as data storage, may also need to be mounted.
+
+Generating Synthetic Data
+-------------------------------
+Now that we are in the container, we can create some data to run the POD on.
+The MDTF has a synthetic data generator for just this purpose. First, move into the MDTF-diagnostics directory:
+
+   .. code-block:: bash
+
+      cd /proj/MDTF-diagnostics/
+
+We generate our synthetic data by running:
+
+   .. code-block:: bash
+
+      micromamba activate _MDTF_synthetic_data
+      pip install mdtf-test-data
+      mkdir mdtf_test_data && cd mdtf_test_data
+      mdtf_synthetic.py -c CMIP --startyear 1980 --nyears 5
+      mdtf_synthetic.py -c CMIP --startyear 1985 --nyears 5
+
+Now would be a good time to generate a catalog for the synthetic data but, for the sake
+of testing, we provide a catalog for the files needed to run the example POD.
+
+Running the POD
+-------------------------------
+The POD can now be run using:
+
+   .. code-block:: bash
+
+      micromamba activate _MDTF_base
+      mdtf_framework.py -f /proj/MDTF-diagnostics/diagnostics/example_multicase/container_config_demo.jsonc
+
+The results can be found in :code-rst:`/proj/wkdir/`.
+
+Building the Container
+--------------------------------
+If you would like, you can build the container yourself using the Dockerfile found in the GitHub repo.
+If using podman (as required internally at GFDL),
+build with the command:
+
+   .. code-block:: bash
+
+      podman build . --format docker -t mdtf
+
+:code-rst:`--format docker` is essential for the COPY commands to work and
+for files to have the expected permissions in your container.
diff --git a/doc/sphinx/ref_output.rst b/doc/sphinx/ref_output.rst
index fb6d8be5d..0e66ebf39 100644
--- a/doc/sphinx/ref_output.rst
+++ b/doc/sphinx/ref_output.rst
@@ -64,11 +64,10 @@ These files and folders are:
   :code-rst:`index.html`.
 * :code-rst:`case_info.yml` provides environment variables for each case. Multirun PODs can read and set
   the environment variables from this file following the
-  `example_multicase.py `__
-  template
+  `example_multicase.py template `__
 * :code-rst:`model/` and :code-rst:`obs/` contain both plots and data for both the model data and observation
   data respectively. The framework appends a temporary :code-rst:`PS` subdirectory to the :code-rst:`model` and
-  :code-rst:`obs`directories where PODs can write postscript files instead of png files. The framework will convert
+  :code-rst:`obs` directories where PODs can write postscript files instead of png files. The framework will convert
   any .(e)ps files in the :code-rst:`PS` subdirectories to .png files and move them to the :code-rst:`model` and/or
   :code-rst:`obs` subdirectories, then delete the :code-rst:`PS` subdirectories during the output generation stage.
   Users can retain the :code-rst:`PS`
diff --git a/doc/sphinx/ref_toc.rst b/doc/sphinx/ref_toc.rst
index 13188ccb3..25eab94f6 100644
--- a/doc/sphinx/ref_toc.rst
+++ b/doc/sphinx/ref_toc.rst
@@ -11,3 +11,5 @@ Framework reference
    ref_envvars
    ref_output
    ref_submodules
+   ref_container
+   ref_vartable
diff --git a/doc/sphinx/ref_vartable.rst b/doc/sphinx/ref_vartable.rst
new file mode 100644
index 000000000..bb60d67ab
--- /dev/null
+++ b/doc/sphinx/ref_vartable.rst
@@ -0,0 +1,5 @@
+.. _ref-vartable:
+
+List of Variables for each POD
+==============================
+The latest list of variables can be found `here <../_static/MDTF_Variable_Lists.html>`_.
diff --git a/doc/sphinx/start_config.rst b/doc/sphinx/start_config.rst
index 21b98a403..e402e96a1 100644
--- a/doc/sphinx/start_config.rst
+++ b/doc/sphinx/start_config.rst
@@ -147,7 +147,11 @@ Options for workflow control
 * **run_pp**: (boolean) Set to *true* to run the preprocessor; default *true*
 
-* **translate_data**: (boolean) Set to *true* to perform data translation; default *true*
+* **translate_data**: (boolean) Set to *true* to perform data translation. If *false*, the preprocessor query
+  automatically uses the convention of each case in the input dataset, and skips translating the
+  variable names and attributes to the POD convention. Note that this means that the precipRateToFluxConversion
+  is not applied. This option is best if you know that the input dataset has variable attributes that exactly
+  match the POD variable attributes; default *true*
 
 * **save_ps**: (boolean) Set to *true* to have PODs save postscript figures in addition to bitmaps; default *false*
diff --git a/doc/sphinx/start_install.rst b/doc/sphinx/start_install.rst
index 5894301bd..c0ee01fe7 100644
--- a/doc/sphinx/start_install.rst
+++ b/doc/sphinx/start_install.rst
@@ -192,9 +192,6 @@ environments in your conda installation. The installation process should finish
 
 Substitute the paths identified above for <*CONDA_ROOT*> and <*CONDA_ENV_DIR*>.
 
-If the ``--env_dir`` flag is omitted, the environment files will be installed in your system's conda's default
-location (usually <*CONDA_ROOT*>/envs).
-
 Install all the package's conda environments with micromamba by running
 
 .. code-block:: console
diff --git a/src/conda/env_dev.yml b/src/conda/env_dev.yml
index 946860142..36fea5e5f 100644
--- a/src/conda/env_dev.yml
+++ b/src/conda/env_dev.yml
@@ -6,15 +6,30 @@ dependencies:
 # contents of python3_base
 - python=3.12
 - numpy=1.26.4
-- scipy=1.11.2
-- netCDF4=1.6.4
+- scipy=1.14.0
+- netCDF4=1.6.5
 - cftime=1.6.2
+- scikit-learn=1.4.2
+- xesmf=0.8.1
+- esmf=8.4.2
+- esmpy=8.4.2
 - xarray=2024.1.1
 - matplotlib=3.8.2
 - cartopy=0.22.0
+- cython=3.0.2
 - pandas=2.2.2
 - pint=0.24.3
+- gsw=3.6.17
+- h5py=3.9.0
+- nc-time-axis=1.4.1
 - dask=2024.7.1
+- pyyaml=6.0.1
+- cfunits=3.3.6
+- intake=0.7.0
+- intake-esm=2024.2.6
+- kerchunk=0.2.7
+- intake-esgf=2024.12.7
+- cf_xarray=0.8.4
 # additional development tools
 - jupyter_core=5.3.1
 - jupyterlab=4.0.5
@@ -23,9 +38,10 @@ dependencies:
 - pylint=2.17.5
 - doc8=1.1.1
 - jinja2=3.1.2
-#- latexmk=4.76
-- cfunits=3.3.6
-- intake=0.7.0
-- intake-esm=2024.2.6
-- cf_xarray=0.8.4
 - cloud_sptheme
+- snakeviz=2.2.0
+- graphviz=2.50.0
+- pip=24.3.1
+- pip:
+  - gprof2dot==2024.6.6
+  - viztracer==1.0.0
diff --git a/src/data_model.py b/src/data_model.py
index 8a7046cb5..d6aa38120 100644
--- a/src/data_model.py
+++ b/src/data_model.py
@@ -126,6 +126,12 @@ def long_name(self):
         """
         pass
 
+    @property
+    @abc.abstractmethod
+    def alternate_standard_names(self):
+        """Optional list of alternate variable standard_names to query"""
+        pass
+
 
 class AbstractDMCoordinateBounds(AbstractDMDependentVariable):
     """Defines interface (set of attributes) for :class:`DMCoordinateBounds`
@@ -764,6 +770,7 @@ class DMDependentVariable(_DMDimensionsMixin, AbstractDMDependentVariable):
     component: str = ""
     associated_files: str = ""
     rename_coords: bool = True
+    alternate_standard_names: list
     # dims: from _DMDimensionsMixin
     # scalar_coords: from _DMDimensionsMixin
@@ -860,9 +867,17 @@ def realm(self):
         return self._realm
 
     @realm.setter
-    def realm(self, value: str):
+    def realm(self, value: str | list):
         self._realm = value
 
+    @property
+    def alternate_standard_names(self):
+        return self._alternate_standard_names
+
+    @alternate_standard_names.setter
+    def alternate_standard_names(self, value: list):
+        self._alternate_standard_names = value
+
     def add_scalar(self, ax, ax_value, **kwargs):
         """Metadata operation corresponding to taking a slice of a higher-dimensional
         variable (extracting its values at axis *ax* = *ax_value*).
The diff --git a/src/data_sources.py b/src/data_sources.py index e862c0a0e..a8db76c1e 100644 --- a/src/data_sources.py +++ b/src/data_sources.py @@ -64,6 +64,36 @@ def read_varlist(self, parent, append_vars: bool=False): def set_date_range(self, startdate: str, enddate: str): self.date_range = util.DateRange(start=startdate, end=enddate) + + def set_query(self, var: varlist_util.VarlistEntry, path_regex: str): + realm_regex = var.realm + '*' + date_range = var.T.range + var_id = var.name + standard_name = var.standard_name + if var.translation.convention is not None: + var_id = var.translation.name + standard_name = var.translation.standard_name + if any(var.translation.alternate_standard_names): + standard_name = [var.translation.standard_name] + var.translation.alternate_standard_names + date_range = var.translation.T.range + if var.is_static: + date_range = None + freq = "fx" + else: + freq = var.T.frequency + if not isinstance(freq, str): + freq = freq.format_local() + if freq == 'hr': + freq = '1hr' + + # define initial query dictionary with variable settings requirements that do not change if + # the variable is translated + self.query['frequency'] = freq + self.query['path'] = path_regex + self.query['realm'] = realm_regex + self.query['standard_name'] = standard_name + self.query['variable_id'] = var_id + def translate_varlist(self, var: varlist_util.VarlistEntry, @@ -94,7 +124,10 @@ class CMIPDataSource(DataSourceBase): # col_spec = sampleLocalFileDataSource_col_spec # varlist = diagnostic.varlist convention: str = "CMIP" - + + def set_query(self, var: varlist_util.VarlistEntry, path_regex: str): + super().set_query(var, path_regex) + return @data_source.maker class CESMDataSource(DataSourceBase): @@ -105,7 +138,10 @@ class CESMDataSource(DataSourceBase): # col_spec = sampleLocalFileDataSource_col_spec # varlist = diagnostic.varlist convention: str = "CESM" - + + def set_query(self, var: varlist_util.VarlistEntry, path_regex: str): + super().set_query(var, path_regex) + return @data_source.maker class GFDLDataSource(DataSourceBase): @@ -116,3 +152,10 @@ class GFDLDataSource(DataSourceBase): # col_spec = sampleLocalFileDataSource_col_spec # varlist = diagnostic.varlist convention: str = "GFDL" + + def set_query(self, var: varlist_util.VarlistEntry, path_regex: str): + super().set_query(var, path_regex) + # this is hacky, but prevents the framework from grabbing from ice_1x1deg + if self.query['realm'] == 'seaIce*': + self.query['realm'] = 'ice' + return diff --git a/src/environment_manager.py b/src/environment_manager.py index cc7da2e8a..10f996c61 100644 --- a/src/environment_manager.py +++ b/src/environment_manager.py @@ -440,7 +440,7 @@ def validate_commands(self): ' -b '.join([''] + reqs.get('ncl', [])), ' -c '.join([''] + reqs.get('Rscript', [])) ] - return [''.join(command)] + return [''.join(command).replace('(','\(').replace(')','\)')] def runtime_exception_handler(self, exc): """Handler which is called if an exception is raised during the POD's diff --git a/src/html/pod_error_snippet.html b/src/html/pod_error_snippet.html index 04f00cddd..30901387b 100644 --- a/src/html/pod_error_snippet.html +++ b/src/html/pod_error_snippet.html @@ -1,9 +1,10 @@

 {{description}} failed to execute:
-    error log;
+    plots,
+    error log,
     data used.

Driver script: {{driver}} -
\ No newline at end of file + diff --git a/src/output_manager.py b/src/output_manager.py index fab209250..6a3bdd479 100644 --- a/src/output_manager.py +++ b/src/output_manager.py @@ -7,6 +7,7 @@ import glob import io import shutil +import yaml from src import util, verify_links import logging @@ -264,6 +265,23 @@ def cleanup_pod_files(self): for f in util.find_files(self.WORK_DIR, 'model/netCDF/*.nc'): os.remove(f) + def cleanup_pp_data(self): + """Removes nc files found in catalog if the ``save_pp_data`` data + is set to false. + + This is done by looping through the ``case_info.yml`` file found in each + POD. If the .nc file exists, it is then deleted. + """ + if not self.save_nc: + for f in util.find_files(self.WORK_DIR, 'case_info.yml'): + case_info_yml = yaml.safe_load(open(f)) + for case in case_info_yml['CASE_LIST']: + for k in case_info_yml['CASE_LIST'][case]: + if k.endswith('FILE') or k.endswith('FILES'): + v = case_info_yml['CASE_LIST'][case][k] + if v != '' and os.path.exists(v) and v.endswith('.nc'): + os.remove(v) + def make_output(self, config: util.NameSpace): """Top-level method to make POD-specific output, post-init. Split off into its own method to make subclassing easier. @@ -281,6 +299,7 @@ def make_output(self, config: util.NameSpace): self.convert_pod_figures(os.path.join('model', 'PS'), 'model') self.convert_pod_figures(os.path.join('obs', 'PS'), 'obs') self.cleanup_pod_files() + self.cleanup_pp_data() class HTMLOutputManager(AbstractOutputManager, diff --git a/src/pod_setup.py b/src/pod_setup.py index 46f3d5f67..5cd188984 100644 --- a/src/pod_setup.py +++ b/src/pod_setup.py @@ -148,6 +148,7 @@ def verify_pod_settings(self): value[0]) from exc def verify_runtime_reqs(runtime_reqs: dict): + pod_env = "" for k, v in runtime_reqs.items(): if any(v): pod_env = k @@ -172,6 +173,7 @@ def verify_runtime_reqs(runtime_reqs: dict): pass else: self.log.info(f"Checking {e} for {self.name} package requirements") + conda_root = self.pod_env_vars['CONDA_ROOT'] if os.path.exists(os.path.join(conda_root, "bin/conda")): args = [os.path.join(conda_root, "bin/conda"), 'list', @@ -297,15 +299,16 @@ def setup_pod(self, runtime_config: util.NameSpace, for case_name, case_dict in runtime_config.case_list.items(): cases[case_name].read_varlist(self, append_vars=append_vars) - # Translate the varlistEntries from the POD convention to the data convention if desired and the pod - # convention does not match the case convention + # Translate the varlistEntries from the POD convention to the data convention for the query if desired data_convention = case_dict.convention.lower() - if runtime_config.translate_data and pod_convention != data_convention: - self.log.info(f'Translating POD variables from {pod_convention} to {data_convention}') - else: - data_convention = 'no_translation' - self.log.info(f'POD convention and data convention are both {pod_convention}. ' + if not runtime_config.translate_data: + self.log.info(f'Runtime option translate_data is set to .false. 
+                          f'No data translation will be performed for case {case_name}.')
+            data_convention = 'no_translation'
+        else:
+            if pod_convention != data_convention:
+                self.log.info(f'Translating POD variables from {pod_convention} to {data_convention}')
+        # A 'noTranslationFieldlist' will be defined for the varlistEntry translation attribute
         for v in pod_input.varlist.keys():
             for v_entry in cases[case_name].varlist.iter_vars():
diff --git a/src/preprocessor.py b/src/preprocessor.py
index 9c727da45..c15fc5c3d 100644
--- a/src/preprocessor.py
+++ b/src/preprocessor.py
@@ -12,7 +12,6 @@
 from src.util import datelabel as dl
 import cftime
 import intake
-import math
 import numpy as np
 import xarray as xr
 import collections
@@ -67,7 +66,6 @@ class PreprocessorFunctionBase(abc.ABC):
       function is capable of converting into the format requested by the POD.
     - :meth:`process`, which actually implements the data format conversion.
     """
-
     def __init__(self, *args):
         """Called during Preprocessor's init."""
         pass
@@ -99,6 +97,36 @@ def execute(self, var: varlist_util.VarlistEntry,
         pass
 
 
+class PercentConversionFunction(PreprocessorFunctionBase):
+    """A PreprocessorFunction which converts the dependent variable's units and values
+    for the specific case of percentages. ``0-1`` units are not defined in the UDUNITS-2
+    library, so this function handles the conversion between ``0-1`` and ``%`` by hand.
+    """
+
+    _std_name_tuple = ('0-1', '%')
+
+    def execute(self, var, ds, **kwargs):
+        var_unit = getattr(var, "units", "")
+        tv = var.translation  # abbreviate
+        tv_unit = getattr(tv, "units", "")
+        # 0-1 to %
+        if str(tv_unit) == self._std_name_tuple[0] and str(var_unit) == self._std_name_tuple[1]:
+            ds[tv.name].attrs['units'] = '%'
+            ds[tv.name].values = ds[tv.name].values * 100
+            return ds
+        # % to 0-1
+        if str(tv_unit) == self._std_name_tuple[1] and str(var_unit) == self._std_name_tuple[0]:
+            ds[tv.name].attrs['units'] = '0-1'
+            # sometimes data labeled % is already in [0, 1]
+            if ds[tv.name].values[:, :, 3].max() < 1.5:
+                return ds
+            else:
+                ds[tv.name].values = ds[tv.name].values / 100
+                return ds
+
+        return ds
+
+
 class PrecipRateToFluxFunction(PreprocessorFunctionBase):
     """A PreprocessorFunction which converts the dependent variable's units,
     for the specific case of precipitation.
Flux and precip rate differ by a factor @@ -241,8 +269,9 @@ def execute(self, var, ds, **kwargs): """ tv = var.translation # abbreviate # convert dependent variable + # Note: may need to define src_unit = ds[tv.name].units or similar ds = units.convert_dataarray( - ds, tv.name, src_unit=None, dest_unit=var.units, log=var.log + ds, tv.name, src_unit=None, dest_unit=var.units.units, log=var.log ) tv.units = var.units @@ -251,8 +280,13 @@ def execute(self, var, ds, **kwargs): if c.axis == 'T': continue # TODO: separate function to handle calendar conversion dest_c = var.axes[c.axis] + src_units = None + for v in ds.variables: + if hasattr(ds[v], 'standard_name'): + if ds[v].standard_name == dest_c.standard_name: + src_units = ds[v].units ds = units.convert_dataarray( - ds, c.standard_name, src_unit=None, dest_unit=dest_c.units, log=var.log + ds, c.standard_name, src_unit=src_units, dest_unit=dest_c.units, log=var.log ) if c.has_bounds and c.bounds_var.name in ds: ds = units.convert_dataarray( @@ -690,7 +724,7 @@ def _functions(self): """ # normal operation: run all functions return [ - AssociatedVariablesFunction, + AssociatedVariablesFunction, PercentConversionFunction, PrecipRateToFluxFunction, ConvertUnitsFunction, ExtractLevelFunction, RenameVariablesFunction ] @@ -709,7 +743,9 @@ def cast_to_cftime(self, dt: datetime.datetime, calendar): ('tm_year', 'tm_mon', 'tm_mday', 'tm_hour', 'tm_min', 'tm_sec')) return cftime.datetime(*tt, calendar=calendar) - def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: str): + def check_time_bounds(self, ds: xr.Dataset, + var: translation.TranslatedVarlistEntry, + freq: str): """Parse quantities related to the calendar for time-dependent data and truncate the date range of model dataset *ds*. @@ -719,7 +755,7 @@ def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: s `__ objects so that they can be compared with the model data's time axis. 
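Since cftime objects and plain datetimes cannot be compared directly, the casting just described reduces to rebuilding the timestamp in the dataset's calendar. A small sketch of that idea (mirroring cast_to_cftime above, not the framework method itself):

import datetime
import cftime

# Sketch: rebuild a datetime as a cftime object in a given calendar so it can
# be compared against a model time axis decoded with use_cftime=True.
def to_cftime(dt: datetime.datetime, calendar: str) -> cftime.datetime:
    return cftime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
                           calendar=calendar)

t = to_cftime(datetime.datetime(1990, 1, 1), "noleap")
print(t)  # 1990-01-01 00:00:00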
""" - # TODO make time bound checks less restrictive for mon and longer data + dt_range = var.T.range ds_decode = xr.decode_cf(ds, use_cftime=True) t_coord = ds_decode[var.T.name] @@ -742,20 +778,20 @@ def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: s # do not begin at hour zero if dt_range.start.lower.hour != t_start.hour: var.log.info("Variable %s data starts at hour %s", var.full_name, t_start.hour) - dt_start_upper_new = datetime.datetime(dt_range.start.upper.year, - dt_range.start.upper.month, - dt_range.start.upper.day, + dt_start_lower_new = datetime.datetime(t_start.year, + t_start.month, + t_start.day, t_start.hour, t_start.minute, t_start.second) - dt_start_upper = self.cast_to_cftime(dt_start_upper_new, cal) + dt_start_lower = self.cast_to_cftime(dt_start_lower_new, cal) else: - dt_start_upper = self.cast_to_cftime(dt_range.start.upper, cal) + dt_start_lower = self.cast_to_cftime(dt_range.start.lower, cal) if dt_range.end.lower.hour != t_end.hour: var.log.info("Variable %s data ends at hour %s", var.full_name, t_end.hour) - dt_end_lower_new = datetime.datetime(dt_range.end.lower.year, - dt_range.end.lower.month, - dt_range.end.lower.day, + dt_end_lower_new = datetime.datetime(t_end.year, + t_end.month, + t_end.day, t_end.hour, t_end.minute, t_end.second) @@ -765,10 +801,10 @@ def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: s # only check that up to monthly precision for monthly or longer data if freq in ['mon', 'year']: - if t_start.year > dt_start_upper.year or \ - t_start.year == dt_start_upper.year and t_start.month > dt_start_upper.month: + if t_start.year > dt_start_lower.year or \ + t_start.year == dt_start_lower.year and t_start.month > dt_start_lower.month: err_str = (f"Error: dataset start ({t_start}) is after " - f"requested date range start ({dt_start_upper}).") + f"requested date range start ({dt_start_lower}).") var.log.error(err_str) raise IndexError(err_str) if t_end.year < dt_end_lower.year or \ @@ -778,9 +814,9 @@ def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: s var.log.error(err_str) raise IndexError(err_str) else: - if t_start > dt_start_upper: + if t_start > dt_start_lower: err_str = (f"Error: dataset start ({t_start}) is after " - f"requested date range start ({dt_start_upper}).") + f"requested date range start ({dt_start_lower}).") var.log.error(err_str) raise IndexError(err_str) if t_end < dt_end_lower: @@ -802,59 +838,152 @@ def normalize_group_time_vals(self, time_vals: np.ndarray) -> np.ndarray: time_vals[i] = '0' + time_vals[i] return time_vals - def check_group_daterange(self, group_df: pd.DataFrame, case_dr, + def drop_attributes(self, xr_ds: xr.Dataset) -> xr.Dataset: + """ Drop attributes that cause conflicts with xarray dataset merge""" + drop_atts = ['average_T2', + 'time_bnds', + 'lat_bnds', + 'lon_bnds', + 'average_DT', + 'average_T1', + 'height', + 'date'] + for att in drop_atts: + if xr_ds.get(att, None) is not None: + xr_ds = xr_ds.drop_vars(att) + return xr_ds + + def check_multichunk(self, group_df: pd.DataFrame, case_dr, log) -> pd.DataFrame: + """Sort the files found by date, grabs the files whose 'chunk_freq' is the + largest number where endyr-startyr modulo 'chunk_freq' is zero and throws out + the rest. 
+
+        Args:
+            group_df (Pandas Dataframe):
+            case_dr: requested daterange of POD
+            log: log file
+        """
+        chunks = group_df['chunk_freq'].unique()
+        if len(chunks) > 1:
+            for i, c in enumerate(chunks):
+                chunks[i] = int(c.replace('yr', ''))
+            chunks = -np.sort(-chunks)
+            case_dt = int(str(case_dr.end)[:4]) - int(str(case_dr.start)[:4]) + 1
+            for c in chunks:
+                if case_dt % c == 0:
+                    grabbed_chunk = str(c) + 'yr'
+                    log.warning("Multiple values for 'chunk_freq' found in dataset; "
+                                "only grabbing data with 'chunk_freq': %s", grabbed_chunk)
+                    break
+            group_df = group_df[group_df['chunk_freq'] == grabbed_chunk]
+        return pd.DataFrame.from_dict(group_df).reset_index()
+
+    def crop_date_range(self, case_date_range: util.DateRange, xr_ds, time_coord) -> xr.Dataset:
+        xr_ds = self.drop_attributes(xr_ds)
+        xr_ds = xr.decode_cf(xr_ds,
+                             decode_coords=True,  # parse coords attr
+                             decode_times=True,
+                             use_cftime=True  # use cftime instead of np.datetime64
+                             )
+        cal = xr_ds[time_coord.name].attrs.get('calendar', 'noleap')
+
+        ds_date_time = xr_ds[time_coord.name].values
+        ds_start_time = ds_date_time[0]
+        ds_end_time = ds_date_time[-1]
+        # force hours in dataset to match date range if frequency is daily, monthly, annual
+        if ds_start_time.hour != case_date_range.start_datetime.hour and case_date_range.precision < 4:
+            dt_start_new = datetime.datetime(ds_start_time.year,
+                                             ds_start_time.month,
+                                             ds_start_time.day,
+                                             ds_start_time.hour,
+                                             ds_start_time.minute,
+                                             ds_start_time.second)
+            ds_start = self.cast_to_cftime(dt_start_new, cal)
+        else:
+            ds_start = self.cast_to_cftime(ds_start_time, cal)
+        if ds_end_time.hour != case_date_range.end_datetime.hour and case_date_range.precision < 4:
+            dt_end_new = datetime.datetime(ds_end_time.year,
+                                           ds_end_time.month,
+                                           ds_end_time.day,
+                                           ds_end_time.hour,
+                                           ds_end_time.minute,
+                                           ds_end_time.second)
+            ds_end = self.cast_to_cftime(dt_end_new, cal)
+        else:
+            ds_end = self.cast_to_cftime(ds_end_time, cal)
+        date_range_cf_start = self.cast_to_cftime(case_date_range.start.lower, cal)
+        date_range_cf_end = self.cast_to_cftime(case_date_range.end.lower, cal)
+
+        # dataset falls entirely outside user-specified date range
+        if ds_start < date_range_cf_start and ds_end < date_range_cf_start or \
+                ds_end > date_range_cf_end and ds_start > date_range_cf_end:
+            new_xr_ds = None
+        # dataset falls entirely within user-specified date range
+        elif ds_start >= date_range_cf_start and ds_end <= date_range_cf_end:
+            new_xr_ds = xr_ds.sel({time_coord.name: slice(ds_start, ds_end)})
+        # dataset overlaps user-specified date range start
+        elif date_range_cf_start < ds_start and \
+                date_range_cf_start <= ds_end <= date_range_cf_end:
+            new_xr_ds = xr_ds.sel({time_coord.name: slice(date_range_cf_start, ds_end)})
+        # dataset overlaps user-specified date range end
+        elif date_range_cf_start < ds_start <= date_range_cf_end <= ds_end:
+            new_xr_ds = xr_ds.sel({time_coord.name: slice(ds_start, date_range_cf_end)})
+        # dataset contains all of requested date range
+        elif date_range_cf_start >= ds_start and date_range_cf_end <= ds_end:
+            new_xr_ds = xr_ds.sel({time_coord.name: slice(date_range_cf_start, date_range_cf_end)})
+
+        return new_xr_ds
+
+    def check_group_daterange(self, df: pd.DataFrame, date_range: util.DateRange, log=_log) -> pd.DataFrame:
         """Sort the files found for each experiment by date, verify that
         the date ranges contained in the files are contiguous in time and that
         the date range of the files spans the query date range.
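The branch ladder in crop_date_range enumerates the ways a file's time span can intersect the requested range. Schematically, with plain integers standing in for cftime values:

# Schematic of the overlap cases in crop_date_range above; returns the slice
# bounds to keep, or None if the dataset and the request are disjoint.
def overlap(ds_start, ds_end, want_start, want_end):
    if ds_end < want_start or ds_start > want_end:
        return None                    # dataset entirely outside the request
    if ds_start >= want_start and ds_end <= want_end:
        return (ds_start, ds_end)      # dataset entirely inside the request
    if ds_start < want_start <= ds_end <= want_end:
        return (want_start, ds_end)    # overlaps the start of the request
    if want_start <= ds_start <= want_end < ds_end:
        return (ds_start, want_end)    # overlaps the end of the request
    return (want_start, want_end)      # dataset contains the whole request

print(overlap(1985, 1995, 1990, 2000))  # (1990, 1995)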
Args: - group_df (Pandas Dataframe): + df (Pandas Dataframe): + date_range: requested daterange of POD log: log file """ date_col = "date_range" - delimiters = ",.!?/&-:;@_'\\s+" - if not hasattr(group_df, 'start_time') or not hasattr(group_df, 'end_time'): - if hasattr(group_df, 'time_range'): - start_times = [] - end_times = [] - for tr in group_df['time_range'].values: - tr = tr.split('-') - start_times.append(tr[0]) - end_times.append(tr[1]) - group_df['start_time'] = pd.Series(start_times) - group_df['end_time'] = pd.Series(end_times) - else: - raise AttributeError('Data catalog is missing attributes `start_time` and/or' - ' `end_time` and can not infer from `time_range`') + if hasattr(df, 'time_range'): + start_times = [] + end_times = [] + for tr in df['time_range'].values: + tr = tr.replace(' ', '').replace('-', '').replace(':', '') + start_times.append(tr[0:len(tr)//2]) + end_times.append(tr[len(tr)//2:]) + df['start_time'] = pd.Series(start_times) + df['end_time'] = pd.Series(end_times) + else: + raise AttributeError('Data catalog is missing the attribute `time_range`;' + ' this is a required entry.') try: - start_time_vals = self.normalize_group_time_vals(group_df['start_time'].values.astype(str)) - end_time_vals = self.normalize_group_time_vals(group_df['end_time'].values.astype(str)) + start_time_vals = self.normalize_group_time_vals(df['start_time'].values.astype(str)) + end_time_vals = self.normalize_group_time_vals(df['end_time'].values.astype(str)) if not isinstance(start_time_vals[0], datetime.date): date_format = dl.date_fmt(start_time_vals[0]) # convert start_times to date_format for all files in query - group_df['start_time'] = start_time_vals - group_df['start_time'] = group_df['start_time'].apply(lambda x: + df['start_time'] = start_time_vals + df['start_time'] = df['start_time'].apply(lambda x: datetime.datetime.strptime(x, date_format)) # convert end_times to date_format for all files in query - group_df['end_time'] = end_time_vals - group_df['end_time'] = group_df['end_time'].apply(lambda x: - datetime.datetime.strptime(x, date_format)) + df['end_time'] = end_time_vals + df['end_time'] = df['end_time'].apply(lambda x: + datetime.datetime.strptime(x, date_format)) # method throws ValueError if ranges aren't contiguous - dates_df = group_df.loc[:, ['start_time', 'end_time']] + dates_df = df.loc[:, ['start_time', 'end_time']] date_range_vals = [] - for idx, x in enumerate(group_df.values): + for idx, x in enumerate(df.values): st = dates_df.at[idx, 'start_time'] en = dates_df.at[idx, 'end_time'] date_range_vals.append(util.DateRange(st, en)) - group_df = group_df.assign(date_range=date_range_vals) + group_df = df.assign(date_range=date_range_vals) sorted_df = group_df.sort_values(by=date_col) files_date_range = util.DateRange.from_contiguous_span( *(sorted_df[date_col].to_list()) ) # throws AssertionError if we don't span the query range - # TODO: define self.attrs.DateRange from runtime config info # assert files_date_range.contains(self.attrs.date_range) # throw out df entries not in date_range return_df = [] @@ -863,12 +992,13 @@ def check_group_daterange(self, group_df: pd.DataFrame, case_dr, if pd.isnull(cat_row['start_time']): continue else: - st = dl.dt_to_str(cat_row['start_time']) - et = dl.dt_to_str(cat_row['end_time']) - stin = dl.Date(st) in case_dr - etin = dl.Date(et) in case_dr - if stin and etin: - return_df.append(cat_row.to_dict()) + ds_st = cat_row['start_time'] + ds_et = cat_row['end_time'] + # date range includes entire or part of dataset + if 
ds_st >= date_range.start.lower and ds_et <= date_range.end.lower or \
+                            ds_st <= date_range.start.lower and ds_et >= date_range.start.lower or \
+                            ds_st <= date_range.end.lower < ds_et:
+                        return_df.append(cat_row)
             return pd.DataFrame.from_dict(return_df)
 
         except ValueError:
@@ -881,6 +1011,7 @@ def check_group_daterange(self, group_df: pd.DataFrame, case_dr,
             # hit an exception; return empty DataFrame to signify failure
             return pd.DataFrame(columns=group_df.columns)
 
+
     def query_catalog(self,
                       case_dict: dict,
                       data_catalog: str,
@@ -906,38 +1037,17 @@ def query_catalog(self,
         if 'date_range' not in [c.lower() for c in cols]:
             cols.append('date_range')
 
-        drop_atts = ['average_T2',
-                     'time_bnds',
-                     'lat_bnds',
-                     'lon_bnds',
-                     'average_DT',
-                     'average_T1',
-                     'height',
-                     'date']
-
         for case_name, case_d in case_dict.items():
             # path_regex = re.compile(r'(?i)(? 0:
-            for s in self.user_pp_scripts:
-                script_name, script_ext = os.path.splitext(s)
-                full_module_name = "user_scripts." + script_name
-                user_module = importlib.import_module(full_module_name, package=None)
-                # Call function with the arguments
-                # user_scripts.example_pp_script.main(xarray_ds, v)
-                xarray_ds = user_module.main(xarray_ds, v.name)
+        if hasattr(self, 'user_pp_scripts'):
+            if self.user_pp_scripts and len(self.user_pp_scripts) > 0:
+                for s in self.user_pp_scripts:
+                    script_name, script_ext = os.path.splitext(s)
+                    full_module_name = "user_scripts." + script_name
+                    user_module = importlib.import_module(full_module_name, package=None)
+                    # Call function with the arguments
+                    # user_scripts.example_pp_script.main(xarray_ds, v)
+                    xarray_ds = user_module.main(xarray_ds, v.name)
         return xarray_ds
 
+
     def setup(self, pod):
         """Method to do additional configuration immediately before :meth:`process`
         is called on each variable for *pod*. Implements metadata cleaning via
@@ -1104,6 +1229,7 @@ def open_dataset_kwargs(self):
             "decode_times": False,
             "use_cftime": False,
             "chunks": "auto"
+
         }
 
     @property
@@ -1386,26 +1512,27 @@ def write_pp_catalog(self,
         # each key is a case
         for case_name, case_dict in cases.items():
             ds_match = input_catalog_ds[case_name]
+            ds_match.time.values.sort()
             for var in case_dict.varlist.iter_vars():
-                ds_var = ds_match.data_vars.get(var.translation.name, None)
+                var_name = var.translation.name
+                ds_var = ds_match.data_vars.get(var_name, None)
                 if ds_var is None:
-                    log.error(f'No var {var.translation.name}')
+                    log.error(f'No var {var_name}')
                 d = dict.fromkeys(columns, "")
                 for key, val in ds_match.attrs.items():
                     if 'intake_esm_attrs' in key:
                         for c in columns:
                             if key.split('intake_esm_attrs:')[1] == c:
                                 d[c] = val
-                if var.translation.convention == 'no_translation':
-                    d.update({'project_id': var.convention})
-                else:
-                    d.update({'project_id': var.translation.convention})
+
+                d.update({'project_id': var.translation.convention})
                 d.update({'path': var.dest_path})
-                d.update({'start_time': util.cftime_to_str(input_catalog_ds[case_name].time.values[0])})
-                d.update({'end_time': util.cftime_to_str(input_catalog_ds[case_name].time.values[-1])})
+                d.update({'time_range': f'{util.cftime_to_str(ds_match.time.values[0]).replace("-", ":")}-'
+                                        f'{util.cftime_to_str(ds_match.time.values[-1]).replace("-", ":")}'})
+                d.update({'standard_name': ds_match[var.name].attrs['standard_name']})
                 cat_entries.append(d)
 
-        # create a Pandas dataframe romthe catalog entries
+        # create a Pandas dataframe from the catalog entries
         cat_df = pd.DataFrame(cat_entries)
         cat_df.head()
@@ -1504,11 +1631,12 @@ def __init__(self,
         # initialize PreprocessorFunctionBase objects
         super().__init__(model_paths, config)
         self.file_preproc_functions = [f for f in
self._functions] - if any([s for s in config.user_pp_scripts]): - self.add_user_pp_scripts(config) - self.module_root = os.path.join(config.CODE_ROOT, "user_scripts") - else: - self.user_pp_scripts = None + if hasattr(config, 'user_pp_scripts'): + if any([s for s in config.user_pp_scripts]): + self.add_user_pp_scripts(config) + self.module_root = os.path.join(config.CODE_ROOT, "user_scripts") + else: + self.user_pp_scripts = None def add_user_pp_scripts(self, runtime_config: util.NameSpace): self.user_pp_scripts = [s for s in runtime_config.user_pp_scripts] diff --git a/src/translation.py b/src/translation.py index 4f45c70df..9590763b0 100644 --- a/src/translation.py +++ b/src/translation.py @@ -106,6 +106,8 @@ def _process_var(section_name: str, in_dict, lut_dict): lut_dict['entries'][k].update({'long_name': ""}) if 'scalar_coord_templates' in v: sct_dict.update({k: v['scalar_coord_templates']}) + if 'alternate_standard_names' not in v: + lut_dict['entries'][k].update({'alternate_standard_names': list()}) return lut_dict, sct_dict d['axes_lut'] = util.WormDict() @@ -150,12 +152,13 @@ def to_CF_standard_name(self, standard_name: str, precip_vars = ['precipitation_rate', 'precipitation_flux'] # search the lookup table for the variable with the specified standard_name # realm, modifier, and long_name attributes + for var_name, var_dict in self.lut.items(): - if var_dict['standard_name'] == standard_name\ + if var_dict['standard_name'] == standard_name \ and var_dict['realm'] == realm\ and var_dict['modifier'] == modifier: - if not var_dict['long_name'] or var_dict['long_name'].lower() == long_name.lower(): - return var_name + # if not var_dict['long_name'] or var_dict['long_name'].lower() == long_name.lower(): + return var_name else: if var_dict['standard_name'] in precip_vars and standard_name in precip_vars: return var_name @@ -176,7 +179,7 @@ def from_CF(self, TODO: expand with more ways to uniquely identify variable (eg cell methods). Args: standard_name: variable or name of the variable - realm: variable realm (atmos, ocean, land, ice, etc...) + realm: str variable realm (atmos, ocean, land, seaIce, etc...) modifier:optional string to distinguish a 3-D field from a 4-D field with the same var_or_name value long_name: str (optional) long name attribute of the variable @@ -213,8 +216,8 @@ def from_CF_name(self, convention. 
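Because every fieldlist entry is now guaranteed an alternate_standard_names list, a lookup can test one name against the primary and alternate names in a single pass. A minimal sketch with a hypothetical one-entry table:

# Sketch of a standard_name lookup that honors alternate_standard_names,
# mirroring the translation logic above; the LUT below is hypothetical.
LUT = {
    "pr": {"standard_name": "precipitation_flux",
           "alternate_standard_names": ["rainfall_flux"]},
}

def find_var(standard_name: str):
    for var_id, entry in LUT.items():
        names = [entry["standard_name"]] + entry.get("alternate_standard_names", [])
        if standard_name in names:
            return var_id
    return None

print(find_var("rainfall_flux"))  # 'pr'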
Args: - var_or_name: variable or name of the variable - realm: model realm of variable + var_or_name: str, variable or name of the variable + realm: str model realm of variable long_name: str (optional): long_name attribute of the variable modifier:optional string to distinguish a 3-D field from a 4-D field with the same var_or_name value @@ -253,7 +256,7 @@ def create_scalar_name(self, old_coord, new_coord: dict, var_id: str, log=_log) # construct convention's name for this variable on a level name_template = self.scalar_coord_templates[var_id][key] if new_coord.units.strip('').lower() == 'pa': - val = int(new_coord.value/100) + val = int(new_coord.value / 100) else: val = int(new_coord.value) @@ -304,8 +307,8 @@ def translate_coord(self, coord, class_dict=None, log=_log) -> dict: lut_val = v.get('value') if isinstance(coord.value, int) and isinstance(lut_val, str): v_int = int(float(lut_val)) - if v_int > coord.value and v_int/coord.value == 100 \ - or v_int < coord.value and coord.value/v_int == 100 or \ + if v_int > coord.value and v_int / coord.value == 100 \ + or v_int < coord.value and coord.value / v_int == 100 or \ v_int == coord.value: new_coord = v break @@ -331,13 +334,16 @@ def translate_coord(self, coord, class_dict=None, log=_log) -> dict: new_coord = v break else: - new_coord = [lut1.values()][0] + new_coord = [lut1[k] for k in lut1.keys()][0] # should return ordered dict if hasattr(coord, 'is_scalar') and coord.is_scalar: coord_name = "" - if new_coord.get('name', None): + if hasattr(new_coord, 'name'): coord_name = new_coord['name'] - elif new_coord.get('out_name', None): + elif hasattr(new_coord, 'out_name'): coord_name = new_coord['out_name'] + else: # TODO add more robust check for key name == 'plev' (or whatever the coordinate name in the lut should be based on fieldlist) + coord_name = [k for k in lut1.keys()][0] + coord_copy = copy.deepcopy(new_coord) coord_copy['value'] = units.convert_scalar_coord(coord, coord_copy['units'], @@ -373,7 +379,6 @@ def translate(self, var, from_convention: str): from_convention_tl = VariableTranslator().get_convention(from_convention) # Fieldlist entry for POD variable long_name = self.get_variable_long_name(var, has_scalar_coords) - fl_entries = from_convention_tl.from_CF(var.standard_name, var.realm, var.modifier, @@ -430,7 +435,7 @@ def translate(self, var, from_convention: str): ) -class NoTranslationFieldlist(metaclass=util.Singleton): +class NoTranslationFieldlist: """Class which partially implements the :class:`Fieldlist` interface but does no variable translation. :class:`~diagnostic.VarlistEntry` objects from the POD are passed through to create :class:`TranslatedVarlistEntry` objects. @@ -471,30 +476,49 @@ def translate_coord(self, coord, log=_log) -> TranslatedVarlistEntry: # should never get here - not called externally raise NotImplementedError - def translate(self, var, from_convention: str): + def translate(self, var, data_convention: str): """Returns :class:`TranslatedVarlistEntry` instance, populated with contents of input :class:`~diagnostic.VarlistEntry` instance. - .. note:: + note:: We return a copy of the :class:`~diagnostic.VarlistEntry` because logic in :class:`~xr_parser.DefaultDatasetParser` alters the translation based on the file's actual contents. 
""" coords_copy = copy.deepcopy(var.dims) + copy.deepcopy(var.scalar_coords) - # TODO: coerce_to_dataclass runs into recursion limit on var; fix that + fieldlist_obj = VariableTranslator().get_convention(data_convention) + fieldlist_entry = dict() + var_id = "" + for variable_id, variable_id_dict in fieldlist_obj.lut.items(): + if variable_id_dict.get('standard_name', None) == var.standard_name \ + or var.standard_name in variable_id_dict.get('alternate_standard_names'): + if variable_id_dict.get('realm', None) == var.realm \ + and variable_id_dict.get('units', None) == var.units.units: + fieldlist_entry = variable_id_dict + var_id = variable_id + break + if len(fieldlist_entry.keys()) < 1: + var.log.error(f'No {data_convention} fieldlist entry found for variable {var.name}') + return None + alt_standard_names = fieldlist_entry.get('alternate_standard_names') return TranslatedVarlistEntry( - name=var.name, + name=var_id, standard_name=var.standard_name, units=var.units, - convention=_NO_TRANSLATION_CONVENTION, + convention=var.convention, coords=coords_copy, modifier=var.modifier, + alternate_standard_names=alt_standard_names, + realm=var.realm, log=var.log ) class VariableTranslator(metaclass=util.Singleton): - """:class:`~util.Singleton` containing information for different variable + """The use of class:`~util.Singleton` means that the VariableTranslator is not a + base class. Instead, it is a metaclass that needs to be created only once (done + in the mdtf_framework.py driver script to hold all the information from the fieldlist + tables that are later shared. Instead, the SUBCLASSES of the VariableTranslator are customized information for different variable naming conventions. These are defined in the ``data/fieldlist_*.jsonc`` files. """ diff --git a/src/units.py b/src/units.py index fc3956cad..57d35334e 100644 --- a/src/units.py +++ b/src/units.py @@ -106,7 +106,7 @@ def units_equal(*args, rtol=None): """Returns True if and only if all unit-ful quantities in *args* are strictly equal (:func:`units_equivalent` is True and :func:`conversion_factor` = 1). - .. note:: + . note:: rtol, atol tolerances on floating-point equality are not currently implemented in cfunits, so we use :func:`relative_tol`. @@ -135,6 +135,8 @@ def conversion_factor(source_unit, dest_unit): *source_unit*, *dest_unit* are coerced to :class:`Units` objects via :func:`to_cfunits`. """ + if str(source_unit) == str(dest_unit): + return 1.0 # bypass function if the units have the same string allowing units like '0-1' to be used source_unit, dest_unit = to_equivalent_units(source_unit, dest_unit) return Units.conform(1.0, source_unit, dest_unit) @@ -186,7 +188,8 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) Dataset *ds*, with *da_name* modified in-place. 
""" da = ds.get(da_name, None) - var_name = da_name + + search_attrs = ['standard_name', 'long_name'] if da is None: # search attributes for standard_name or long_name that matches input name @@ -200,7 +203,7 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) if isinstance(att, str): if att == da_name: da = dset - var_name = var + da_name = var break # try to find approximate matches to input name in standard_name and long_name attributes if da is None: @@ -216,7 +219,7 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) log.info("Found approximate match for %s in dataset %s attribute %s", da_name, attr, att_value) da = dset - var_name = var + da_name = var break if da is None: raise ValueError(f"convert_dataarray: '{da_name}' not found in dataset.") @@ -236,8 +239,14 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) std_name = f"{da.attrs['standard_name']}" elif 'long_name' in da.attrs: std_name = f"{da.attrs['long_name']}" - ds[var_name].attrs['standard_name'] = std_name.replace(' ', '_') - + ds[da_name].attrs['standard_name'] = std_name.replace(' ', '_') + + # udunits does not recognize mb == hPa, so hardcode correction + if src_unit == 'mb': + ds[da_name].attrs['units'] = 'hPa' + src_unit = 'hPa' + if dest_unit == 'mb': + dest_unit = 'hPa' if units_equal(src_unit, dest_unit): log.debug(("Source, dest units of '%s'%s identical (%s); no conversion " "done."), da.name, std_name, dest_unit) diff --git a/src/util/catalog.py b/src/util/catalog.py index 268f75b3c..9618b3a75 100644 --- a/src/util/catalog.py +++ b/src/util/catalog.py @@ -69,7 +69,7 @@ def define_pp_catalog_assets(config, cat_file_name: str) -> dict: ) # add columns required for GFDL/CESM institutions and MDTF-diagnostics functionality - append_atts = ['chunk_freq', 'path', 'standard_name', 'start_time', 'end_time'] + append_atts = ['chunk_freq', 'path', 'standard_name', "time_range"] for att in append_atts: cat_dict["attributes"].append( dict(column_name=att) diff --git a/src/util/datelabel.py b/src/util/datelabel.py index 4c9539431..b70473835 100644 --- a/src/util/datelabel.py +++ b/src/util/datelabel.py @@ -20,12 +20,12 @@ Properties and use of :class:`DateRange`, :class:`Date` and :class:`DateFrequency` objects are best illustrated by examples: -.. code-block:: python +. 
code-block:: python >>> Date('20001215').month 12 - >>> Date('200012') == datetime(2000, 12, 1) + >>> Date('200012') == datetime.datetime(2000, 12, 1) True >>> DateRange('2010-2020') in DateRange('2008-2019') @@ -56,6 +56,8 @@ # match-case statement to give date format # input can be int or str + + def date_fmt(date: str): date_digits = len(date) match date_digits: @@ -72,6 +74,8 @@ def date_fmt(date: str): return fmt # convert a string to a cftime object + + def str_to_cftime(time_str: str, fmt=None, calendar=None): if fmt is None: fmt = date_fmt(time_str) @@ -621,11 +625,15 @@ def __init__(self, start, end=None, precision=None, log=_log): # start: split_str[start index of 0: nelem_half elements total], end[start index at nelem_half, (start, end) = ''.join(split_str[:nelem_half]), ''.join(split_str[nelem_half:]) + elif len(start) == 2: (start, end) = start else: raise ValueError('Bad input ({},{})'.format(start, end)) - + if isinstance(start, str): + start = start.replace(':','') + if isinstance(end, str): + end = end.replace(':','') dt0, prec0 = self._coerce_to_datetime(start, is_lower=True) dt1, prec1 = self._coerce_to_datetime(end, is_lower=False) if not (dt0 < dt1): @@ -1175,7 +1183,7 @@ def _parse_input_string(cls, quantity, unit): s = 'wk' elif s in ['daily', 'day', 'days', 'dy', 'd', 'diurnal', 'diurnally']: s = 'day' - elif s in ['hourly', 'hour', 'hours', 'hr', 'h']: + elif s in ['hourly', 'hour', 'hours', 'hr', 'h', '1hr']: s = 'hr' elif s in ['minutes', 'minute', 'min']: s = 'min' diff --git a/src/varlist_util.py b/src/varlist_util.py index 97332a4a7..f256acd5b 100644 --- a/src/varlist_util.py +++ b/src/varlist_util.py @@ -560,7 +560,8 @@ def setup_var(self, v.dest_path = self.variable_dest_path(model_paths, case_name, v) try: trans_v = translate.translate(v, from_convention) - assert trans_v is not None, f'translation for varlistentry {v.name} failed' + if trans_v is None: + v.log.error(f'translation for varlistEntry {v.name} failed') v.translation = trans_v # copy preferred gfdl post-processing component during translation if hasattr(trans_v, "component"): diff --git a/src/xr_parser.py b/src/xr_parser.py index 01aab0dbd..f7f20a878 100644 --- a/src/xr_parser.py +++ b/src/xr_parser.py @@ -194,7 +194,7 @@ def _old_axes_dict(self, var_name=None): if len(v) > 1 and var_name is not None: ax = [c for c in v if c in itertools.chain.from_iterable(axes_obj.cf.coordinates.values())] del_ax = [d for d in v if d not in itertools.chain.from_iterable(axes_obj.cf.coordinates.values())] - if del_ax is not None: # remove the entries that are not in the cf.coordinates.values dict + if del_ax is not None and len(del_ax) > 0: # remove the entries that are not in the cf.coordinates.values dict # append entries that are in the cf.coordinates.values dict if they are missing in coords_list # and dims_list if del_ax[0] in coords_list: @@ -208,14 +208,15 @@ def _old_axes_dict(self, var_name=None): if ax is not None: vardict[k] = ax - if ax[0] not in coords_list: - _log.warning(("cf_xarray fix: %s axis %s not in dimensions " - "for %s; dropping."), k, ax[0], var_name) - delete_keys.append(k) - else: - coords_list.remove(ax[0]) - if ax[0] in dims_list: - dims_list.remove(ax[0]) + for a in ax: + if a not in coords_list: + _log.warning(("cf_xarray fix: %s axis %s not in dimensions " + "for %s; dropping."), k, a, var_name) + delete_keys.append(k) + else: + coords_list.remove(a) + if a in dims_list: + dims_list.remove(a) elif len(v) == 1: if v[0] not in coords_list: _log.warning(("cf_xarray fix: %s axis %s 
not in dimensions "
@@ -722,7 +723,13 @@ def approximate_attribute_value(self, our_name: str, ds_name: str) -> bool:
         """Determine if the dataset attribute value is an approximate match to the expected
         attribute value"""
         exclude = ["with", "on", "in", "of", "at", "near"]
         our_name_split = [i for i in our_name.split('_') if i not in exclude]
-        ds_name_split = [i for i in ds_name.split('_') if i not in exclude]
+        if isinstance(ds_name, str):
+            ds_name_split = [i for i in ds_name.split('_') if i not in exclude]
+        elif isinstance(ds_name, list):
+            # the attribute may be a list of candidate names; compare against the first entry
+            ds_name_split = [i for i in ds_name[0].split('_') if i not in exclude]
+
         isect = set(our_name_split).intersection(ds_name_split)
         if len(isect) >= len(our_name_split) - 2 and len(isect) > 0:
@@ -757,8 +764,10 @@ def compare_attr(self, our_attr_tuple, ds_attr_tuple, comparison_func=None,
         - False: Change *ds* to match *our_var*.
         """
         # unpack tuples
+
         our_var, our_attr_name, our_attr = our_attr_tuple
         ds_var, ds_attr_name, ds_attr = ds_attr_tuple
+
         if comparison_func is None:
             comparison_func = (lambda x, y: x == y)
@@ -821,8 +830,8 @@ def compare_attr(self, our_attr_tuple, ds_attr_tuple, comparison_func=None,
             else:
                 comparison_func = self.approximate_attribute_value(our_attr, ds_attr)
                 if not comparison_func:
-                    raise util.MetadataEvent((f"Unexpected {our_attr_name} for variable "
-                                              f"'{our_var.name}': '{ds_attr}' (expected '{our_attr}')."))
+                    self.log.warning(f"Unexpected {our_attr_name} for variable "
+                                     f"'{our_var.name}': '{ds_attr}' (expected '{our_attr}').")
                 else:
                     self.log.warning(f"Could not find exact match for {our_var.name} attribute {our_attr_name} "
                                      f"{our_attr}; data processing will proceed with approximate match {ds_attr}")
@@ -866,6 +875,8 @@ def reconcile_attr(self, our_var, ds_var, our_attr_name, ds_attr_name=None,
         """Compare attribute of a :class:`~src.data_model.DMVariable` (*our_var*)
         with what's set in the xarray.Dataset (*ds_var*).
         """
+        if ds_var is None:
+            return
         if ds_attr_name is None:
             ds_attr_name = our_attr_name
         our_attr = getattr(our_var, our_attr_name)
@@ -946,6 +957,7 @@ def reconcile_units(self, our_var, ds_var):
         # will raise UnitsUndefinedError or log warning if unit attribute missing
         self.check_metadata(ds_var, 'units')
         # Check equivalence of units: if units are not equivalent, raise MetadataEvent
+
         self.reconcile_attr(our_var, ds_var, 'units',
                             comparison_func=units.units_equivalent,
                             fill_ours=True, fill_ds=True
@@ -1060,28 +1072,38 @@ def reconcile_coord_bounds(self, our_coord, ds, ds_coord_name):
         expectations based on the model's convention (*our_var*), for the bounds
         on the dimension coordinate *our_coord*.
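approximate_attribute_value calls two names a match when nearly all of their informative tokens coincide. A compact sketch of that test:

# Sketch of the token-overlap test in approximate_attribute_value above:
# split on underscores, drop filler words, and accept if at most two of
# our tokens are missing from the dataset's name.
def approx_match(our_name: str, ds_name: str) -> bool:
    exclude = {"with", "on", "in", "of", "at", "near"}
    ours = [t for t in our_name.split("_") if t not in exclude]
    theirs = set(ds_name.split("_")) - exclude
    common = set(ours) & theirs
    return len(common) >= len(ours) - 2 and len(common) > 0

print(approx_match("air_temperature_at_surface", "surface_air_temperature"))  # True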
""" - try: + if len(ds.cf.bounds) > 0: bounds = ds.cf.get_bounds(ds_coord_name) - except KeyError: - # cf accessor could't find associated bounds variable + elif hasattr(ds[ds_coord_name], 'attrs'): + if ds[ds_coord_name].attrs.get('bounds', None): + bounds = ds[ds_coord_name].bounds + if isinstance(bounds, str): + our_coord.bounds_var = None + return + else: + our_coord.bounds_var = None + return + else: + # cf accessor couldn't find associated bounds variable + bounds = None our_coord.bounds_var = None return - # Inherit standard_name from our_coord if not present (regardless of # skip_std_name), overwriting metadata on bounds if different - self.reconcile_attr(our_coord, bounds, 'standard_name', - fill_ours=False, fill_ds=True, overwrite_ours=False - ) - # Inherit units from our_coord if not present (regardless of skip_units), - # overwriting metadata on bounds if different - self.reconcile_attr(our_coord, bounds, 'units', - comparison_func=units.units_equal, - fill_ours=False, fill_ds=True, overwrite_ours=False - ) - if our_coord.name != bounds.name: - self.log.debug("Updating %s for '%s' to value '%s' from dataset.", - 'bounds', our_coord.name, bounds.name) - our_coord.bounds_var = bounds + if bounds is not None: + self.reconcile_attr(our_coord, bounds, 'standard_name', + fill_ours=False, fill_ds=True, overwrite_ours=False + ) + # Inherit units from our_coord if not present (regardless of skip_units), + # overwriting metadata on bounds if different + self.reconcile_attr(our_coord, bounds, 'units', + comparison_func=units.units_equal, + fill_ours=False, fill_ds=True, overwrite_ours=False + ) + if our_coord.name != bounds.name: + self.log.debug("Updating %s for '%s' to value '%s' from dataset.", + 'bounds', our_coord.name, bounds.name) + our_coord.bounds_var = bounds def reconcile_dimension_coords(self, our_var, ds): """Reconcile name, standard_name and units attributes between the @@ -1268,6 +1290,9 @@ def check_metadata(self, ds_var, *attr_names): """Wrapper for :meth:`~DefaultDatasetParser.normalize_attr`, specialized to the case of getting a variable's standard_name. 
""" + delete_chars = re.compile(r"[\".,'*]") + ds_var.attrs = {delete_chars.sub('', k): v for k, v in ds_var.attrs.items()} + ds_var.encoding = {delete_chars.sub('', k): v for k, v in ds_var.encoding.items()} for attr in attr_names: if attr not in ds_var.attrs: if attr in ds_var.encoding: diff --git a/tests/esm_catalog_test_macos.csv b/tests/esm_catalog_test_macos.csv index 8c57d8a7d..bd856631a 100644 --- a/tests/esm_catalog_test_macos.csv +++ b/tests/esm_catalog_test_macos.csv @@ -7,8 +7,8 @@ CMIP,,,,,,day,,,,,,,,,,wind_speed,,m s-1,atmos,,,,,,sfcWind,,,1,,1990-01-01 00:0 CMIP,,,,,,day,,,,,,,,,,air_temperature,,K ,atmos,,,,,,tas,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.tas.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,surface_temperature,,K,atmos,,,,,,ts,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.ts.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg500,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg500.day.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_latent_heat_flux,,W m-2,atmos,,,,,,hfls,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfls.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_sensible_heat_flux,,W m-2,atmos,,,,,,hfss,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfss.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,specific_humidity,,1,atmos,,,,,,hus,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hus.mon.nc,v0 @@ -33,13 +33,13 @@ CMIP,,,,,,mon,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,32,,1990-01-01 00:0 
CMIP,,,,,,mon,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.wap.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,sea_surface_height_above_geoid,,m,ocean,,,,,,zos,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zos.mon.nc,v0 -GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,WVP,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,wvp,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 
+GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 CESM,,,,,,1hr,,,,,,,,,,precipitation_rate,,m s-1,atmos,,,,,,PRECT,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.PRECT.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,prw,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.prw.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,specific_humidity,,1,atmos,,,,,,qsat,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.qsat_int.1hr.nc,v0 diff --git a/tests/esm_catalog_test_ubuntu.csv b/tests/esm_catalog_test_ubuntu.csv index 2fe35eff9..10f413688 100644 --- a/tests/esm_catalog_test_ubuntu.csv +++ b/tests/esm_catalog_test_ubuntu.csv @@ -7,8 +7,8 @@ CMIP,,,,,,day,,,,,,,,,,wind_speed,,m s-1,atmos,,,,,,sfcWind,,,1,,1990-01-01 00:0 CMIP,,,,,,day,,,,,,,,,,air_temperature,,K ,atmos,,,,,,tas,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.tas.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,surface_temperature,,K,atmos,,,,,,ts,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.ts.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg500,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg500.day.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 
+CMIP,,,,,,fx,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_latent_heat_flux,,W m-2,atmos,,,,,,hfls,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfls.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_sensible_heat_flux,,W m-2,atmos,,,,,,hfss,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfss.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,specific_humidity,,1,atmos,,,,,,hus,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hus.mon.nc,v0 @@ -33,13 +33,13 @@ CMIP,,,,,,mon,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,32,,1990-01-01 00:0 CMIP,,,,,,mon,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.wap.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,geopotential_height,,m,ocean,,,,,,zg,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,sea_surface_height_above_geoid,,m,ocean,,,,,,zos,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zos.mon.nc,v0 -GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,WVP,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m 
s-1,atmos,,,,,,ua,,,19,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,wvp,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 CESM,,,,,,1hr,,,,,,,,,,precipitation_rate,,m s-1,atmos,,,,,,PRECT,,,1,,1990-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.PRECT.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,prw,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.prw.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,specific_humidity,,1,atmos,,,,,,qsat_int,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.qsat_int.1hr.nc,v0 diff --git a/tests/github_actions_test_macos_set1.jsonc b/tests/github_actions_test_macos_1a.jsonc similarity index 96% rename from tests/github_actions_test_macos_set1.jsonc rename to tests/github_actions_test_macos_1a.jsonc index 61e6813c9..b94276586 100644 --- 
a/tests/github_actions_test_macos_set1.jsonc +++ b/tests/github_actions_test_macos_1a.jsonc @@ -3,19 +3,16 @@ // as blank lines (JSONC quasi-standard.) { "pod_list": [ - //"convective_transition_diag", //"Wheeler_Kiladis", - //"MJO_suite", - "MJO_teleconnection", "precip_diurnal_cycle" //"EOF_500hPa" ], - "case_list": { + "case_list" : { "NCAR.Synthetic": { "convention" : "CESM", "startdate" : "19750101", "enddate" : "19811231" - } + } }, // PATHS --------------------------------------------------------------------- // Location of supporting data downloaded when the framework was installed. diff --git a/tests/github_actions_test_macos_1b.jsonc b/tests/github_actions_test_macos_1b.jsonc new file mode 100644 index 000000000..c17cf3589 --- /dev/null +++ b/tests/github_actions_test_macos_1b.jsonc @@ -0,0 +1,81 @@ +// Configuration for MDTF-diagnostics driver script self-test using the macOS github action. +// All text to the right of an unquoted "//" is a comment and ignored, as well +// as blank lines (JSONC quasi-standard.) +{ + "pod_list": [ + "MJO_suite", + "MJO_teleconnection" + // "convective_transition_diag" + ], + "case_list" : { + "NCAR.Synthetic" : { + "convention" : "CESM", + "startdate" : "19750101", + "enddate" : "19811231" + } + }, + // PATHS --------------------------------------------------------------------- + // Location of supporting data downloaded when the framework was installed. + + // If a relative path is given, it's resolved relative to the MDTF-diagnostics + // code directory. Environment variables (eg, $HOME) can be referenced with a + // "$" and will be expended to their current values when the framework runs. + "DATA_CATALOG": "./tests/esm_catalog_test_macos.json", + // Parent directory containing observational data used by individual PODs. + "OBS_DATA_ROOT": "../inputdata/obs_data", + + // Working directory. Defaults to working directory if blank. + "WORK_DIR": "../wkdir", + + // Directory to write output. The results of each run of the framework will be + // put in a subdirectory of this directory. + "OUTPUT_DIR": "../wkdir", + + // Location of the Anaconda/miniconda installation to use for managing + // dependencies (path returned ls by running `conda info --base`.) If empty, + // framework will attempt to determine location of system's conda installation. + "conda_root": "/Users/runner/micromamba", + + "micromamba_exe": "/Users/runner/micromamba-bin/micromamba", + + + // Directory containing the framework-specific conda environments. This should + // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left + // blank, the framework will look for its environments in the system default + // location. + "conda_env_root": "/Users/runner/micromamba/envs", + + // SETTINGS ------------------------------------------------------------------ + // Any command-line option recognized by the mdtf script + // can be set here, in the form "flag name": "desired setting". + + // Settings affecting what output is generated: + // Set to true to run the preprocessor; default true: + "run_pp": true, + + // Set to true to perform data translation; default false: + "translate_data": true, + + // Set to true to have PODs save postscript figures in addition to bitmaps. + "save_ps": false, + + // Set to true for files > 4 GB + "large_file": false, + + // If true, leave pp data in OUTPUT_DIR after preprocessing; if false, delete pp data after PODs + // run to completion + "save_pp_data": true, + + // Set to true to save HTML and bitmap plots in a .tar file. 
+ "make_variab_tar": false, + + // Generate html output for multiple figures per case + "make_multicase_figure_html": false, + + // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved + // under a unique name. + "overwrite": false, + // List with custom preprocessing script(s) to run on data + // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository + "user_pp_scripts" : [] +} diff --git a/tests/github_actions_test_macos_set2.jsonc b/tests/github_actions_test_macos_2.jsonc similarity index 100% rename from tests/github_actions_test_macos_set2.jsonc rename to tests/github_actions_test_macos_2.jsonc diff --git a/tests/github_actions_test_macos_set3.jsonc b/tests/github_actions_test_macos_3.jsonc similarity index 97% rename from tests/github_actions_test_macos_set3.jsonc rename to tests/github_actions_test_macos_3.jsonc index a7490eda0..5af660168 100644 --- a/tests/github_actions_test_macos_set3.jsonc +++ b/tests/github_actions_test_macos_3.jsonc @@ -6,10 +6,10 @@ "pod_list": [ //"temp_extremes_distshape",// needs matplotlib 3.7.3, but not avail on conda yet //"tropical_pacific_sea_level", - "ocn_surf_flux_diag", - "mixed_layer_depth", - "seaice_suite", - "stc_eddy_heat_fluxes" + "ocn_surf_flux_diag" + //"mixed_layer_depth", + //"seaice_suite", + //"stc_eddy_heat_fluxes" // "albedofb" ], "case_list" : { diff --git a/tests/github_actions_test_ubuntu_set1.jsonc b/tests/github_actions_test_ubuntu_1a.jsonc similarity index 94% rename from tests/github_actions_test_ubuntu_set1.jsonc rename to tests/github_actions_test_ubuntu_1a.jsonc index 1b2d4bcec..acb962a90 100644 --- a/tests/github_actions_test_ubuntu_set1.jsonc +++ b/tests/github_actions_test_ubuntu_1a.jsonc @@ -3,19 +3,16 @@ // as blank lines (JSONC quasi-standard.) { "pod_list": [ - //"convective_transition_diag", - //"Wheeler_Kiladis", - //"MJO_suite", - "MJO_teleconnection", - "precip_diurnal_cycle" + "Wheeler_Kiladis" + //"precip_diurnal_cycle" //"EOF_500hPa" ], - "case_list" : { + "case_list" : { "NCAR.Synthetic": { "convention" : "CESM", "startdate" : "19750101", "enddate" : "19811231" - } + } }, // PATHS --------------------------------------------------------------------- // Location of supporting data downloaded when the framework was installed. diff --git a/tests/github_actions_test_ubuntu_1b.jsonc b/tests/github_actions_test_ubuntu_1b.jsonc new file mode 100644 index 000000000..eeefa431c --- /dev/null +++ b/tests/github_actions_test_ubuntu_1b.jsonc @@ -0,0 +1,80 @@ +// Configuration for MDTF-diagnostics driver script self-test. +// All text to the right of an unquoted "//" is a comment and ignored, as well +// as blank lines (JSONC quasi-standard.) +{ + "pod_list": [ + //"MJO_suite", + //"MJO_teleconnection", + "convective_transition_diag" + ], + "case_list" : { + "NCAR.Synthetic" : { + "convention" : "CESM", + "startdate" : "19750101", + "enddate" : "19811231" + } + }, + // PATHS --------------------------------------------------------------------- + // Location of supporting data downloaded when the framework was installed. + + // If a relative path is given, it's resolved relative to the MDTF-diagnostics + // code directory. Environment variables (eg, $HOME) can be referenced with a + // "$" and will be expended to their current values when the framework runs. + "DATA_CATALOG": "./tests/esm_catalog_test_ubuntu.json", + // Parent directory containing observational data used by individual PODs. + "OBS_DATA_ROOT": "../inputdata/obs_data", + + // Working directory. 
Defaults to OUTPUT_DIR if blank. + "WORK_DIR": "../wkdir", + + // Directory to write output. The results of each run of the framework will be + // put in a subdirectory of this directory. + "OUTPUT_DIR": "../wkdir", + + // Location of the Anaconda/miniconda installation to use for managing + // dependencies (path returned by running `conda info --base`.) If empty, + // framework will attempt to determine location of system's conda installation. + //"conda_root": "/usr/share/miniconda3", + "conda_root": "/home/runner/micromamba", + + "micromamba_exe": "/home/runner/micromamba-bin/micromamba", + // Directory containing the framework-specific conda environments. This should + // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left + // blank, the framework will look for its environments in the system default + // location. + "conda_env_root": "/home/runner/micromamba/envs", + + // SETTINGS ------------------------------------------------------------------ + // Any command-line option recognized by the mdtf script + // can be set here, in the form "flag name": "desired setting". + + // Settings affecting what output is generated: + // Set to true to run the preprocessor; default true: + "run_pp": true, + + // Set to true to perform data translation; default false: + "translate_data": true, + + // Set to true to have PODs save postscript figures in addition to bitmaps. + "save_ps": false, + + // Set to true for files > 4 GB + "large_file": false, + + // If true, leave pp data in OUTPUT_DIR after preprocessing; if false, delete pp data after PODs + // run to completion + "save_pp_data": true, + + // Set to true to save HTML and bitmap plots in a .tar file. + "make_variab_tar": false, + + // Generate html output for multiple figures per case + "make_multicase_figure_html": false, + + // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved + // under a unique name.
+ "overwrite": false, + // List with custom preprocessing script(s) to run on data + // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository + "user_pp_scripts" : [] +} diff --git a/tests/github_actions_test_ubuntu_set2.jsonc b/tests/github_actions_test_ubuntu_2.jsonc similarity index 100% rename from tests/github_actions_test_ubuntu_set2.jsonc rename to tests/github_actions_test_ubuntu_2.jsonc diff --git a/tests/github_actions_test_ubuntu_set3.jsonc b/tests/github_actions_test_ubuntu_3.jsonc similarity index 97% rename from tests/github_actions_test_ubuntu_set3.jsonc rename to tests/github_actions_test_ubuntu_3.jsonc index 376548366..0f3dba986 100644 --- a/tests/github_actions_test_ubuntu_set3.jsonc +++ b/tests/github_actions_test_ubuntu_3.jsonc @@ -5,10 +5,10 @@ "pod_list": [ //"temp_extremes_distshape",// needs matplotlib 3.7.3, but not avail on conda yet //"tropical_pacific_sea_level", - "ocn_surf_flux_diag", - "mixed_layer_depth", - "seaice_suite", - "stc_eddy_heat_fluxes" + "ocn_surf_flux_diag" + //"mixed_layer_depth", + //"seaice_suite", + //"stc_eddy_heat_fluxes" // "albedofb" ], "case_list" : { diff --git a/tools/get_POD_varname/get_POD_varname.py b/tools/get_POD_varname/get_POD_varname.py new file mode 100644 index 000000000..49835e2be --- /dev/null +++ b/tools/get_POD_varname/get_POD_varname.py @@ -0,0 +1,167 @@ +import json +import pandas as pd +import os +import requests +import re +from datetime import datetime +from jinja2 import Template + +base_url = "https://github.com/NOAA-GFDL/MDTF-diagnostics/blob/main/diagnostics" +api_url = "https://api.github.com/repos/NOAA-GFDL/MDTF-diagnostics/contents/diagnostics" + +#edit if you would like to save csv for each POD +save_csv = False + +def good_status(code): + # 2xx is a positive completion return code + if str(code)[0] == '2': + return True + else: + return False + +def get_json_content(url): + response = requests.get(url) + if good_status(response.status_code): + try: + raw_url = url.replace('github.com', 'raw.githubusercontent.com').replace('/blob', '') + response = requests.get(raw_url) + if good_status(response.status_code): + # Remove comments from JSONC content, this part is important otherwise the attributes could not be extracted correctly + jsonc_content = response.text + json_content = re.sub(r'//.*?\n|/\*.*?\*/', '', jsonc_content, flags=re.S) + json_content = re.sub(r',\s*([}\]])', r'\1', json_content) + return json.loads(json_content) + else: + print(f"Cannot get content from URL: {raw_url}, Status code: {response.status_code}") + return None + except json.JSONDecodeError: + print(f"Error decoding JSON from URL: {url}") + return None + else: + print(f"Failure in function get_json_content()!") + return None + +def get_folders_via_api(): + response = requests.get(api_url) + if good_status(response.status_code): + data = response.json() + folders = [item['name'] for item in data if item['type'] == 'dir'] + return folders + else: + print(f"Failure in function get_folders_via_api()!") + return [] + +def process_settings_jsonc(folder_name, json_content): + frequency = json_content.get("data", {}).get("frequency", "N/A") + convention = json_content.get("settings", {}).get("convention", "N/A") + # Extract varlist + df_data = [] + for var, attributes in json_content.get("varlist", {}).items(): + if frequency == 'N/A' and 'frequency' in attributes: + frequency = attributes.get("frequency", "N/A") + df_data.append([ + var, + attributes.get("units", "N/A"), + 
attributes.get("realm", "N/A"), + ', '.join(attributes.get("dimensions", [])), + frequency, + attributes.get("standard_name", "N/A"), + convention + ]) + + # move the freq before standard name + columns = ["Variable", "Units", "Realm", "Dimensions", "Frequency", "Standard Name", "Convention"] + df = pd.DataFrame(df_data, columns=columns) + + if save_csv: + # Save the file in case we want to make use of it to modity the XML files later + csv_filename = f"{folder_name}_varlist.csv" + df.to_csv(csv_filename, index=False) + + return df + +# Two ways to get the folder names, one is automatic search (cons: may include those example PODs and some incomplete PODs), one is to add manully +folders = get_folders_via_api() + +# Check if folders were found +if not folders: + print("-----------------------------------------------------------------") + print("No folders found under the diagnostics directory. Please check!!!") + print("-----------------------------------------------------------------") +else: + # Get the time + current_time = datetime.now().strftime("%Y/%m/%d %H:%M:%S") + # Initialize HTML + html_template = Template(""" + + + + + + MDTF PODs Variable Lists (Tables For Individual POD and Combined All PODs) + + + +

MDTF PODs Variable Lists (Tables For Individual POD and Combined All PODs)

+

Generated at {{ current_time }}. For questions, contact Wenhao.Dong@noaa.gov

+ {% for folder_name, table_html in tables.items() %} +

POD: {{ folder_name }}

+
{{ table_html | safe }}
+ {% endfor %} +

All PODs

+
{{ all_pods_table | safe }}
+ + + """) + + # Loop through each POD and generate tables + tables = {} + all_data = [] + for folder in folders: + settings_url = f"{base_url}/{folder}/settings.jsonc" + json_content = get_json_content(settings_url) + if json_content: + df = process_settings_jsonc(folder, json_content) + tables[folder] = df.to_html(classes='table table-striped', index=False) + df['Used by'] = folder # Add 'Used by' column only for the combined table + all_data.append(df) + else: + print(f"No settings.jsonc found for folder: {folder}") + + # Merge the data for all PODs + if all_data: + all_pods_df = pd.concat(all_data) + all_pods_df = all_pods_df.groupby(["Variable", "Units", "Realm", "Dimensions", "Frequency", "Standard Name"])['Used by'].apply(lambda x: ', '.join(sorted(set(x)))).reset_index() + columns = ["Variable", "Units", "Realm", "Dimensions", "Frequency", "Standard Name", "Used by"] + all_pods_df = all_pods_df[columns] + all_pods_table = all_pods_df.to_html(classes='table table-striped', index=False) + + else: + all_pods_table = "

No single data available

" + + html_content = html_template.render(tables=tables, all_pods_table=all_pods_table, current_time=current_time) + with open("MDTF_Variable_Lists.html", "w") as f: + f.write(html_content) + + print("\nHTML file 'MDTF_Variable_Lists.html'") + print("Well Done!!!")