diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c5b8f3e1..4635a441 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -22,7 +22,7 @@ jobs: with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-label: 'stale' - exempt-issue-labels: ['category: Discussion','category: Feature Request','deferred','help needed: Open Research Problem','help needed: Request Input from Community','never stale','TODO: Documentation'] + exempt-issue-labels: 'category: Discussion,category: Feature Request,deferred,help needed: Open Research Problem,help needed: Request Input from Community,never stale,TODO: Documentation' days-before-issue-stale: 30 days-before-issue-close: 7 stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. If there are no updates within 7 days it will be closed. You can add the "never stale" tag to prevent the issue from closing this issue.' diff --git a/CHANGELOG.md b/CHANGELOG.md index 6513034f..6ac76cbf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,9 +5,31 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [3.10.0] - 2024-11-07 +### Added +- Added TSOIL1 field to `ExtState` +- Added `download_data.py` and `download_data.yml` to the `run` folder. These will be copied into HEMCO standalone rundirs +- Added `run/cleanRunDir.sh` script to remove old output files & log files +- Added documentation for the HEMCO 3.10.0 release, including HEMCO standalone dry-run documentation + +### Changed +- Added emission factors for ALK6, C4H6, EBZ, STYR, TMB for GFED and FINN biomass burning extensions +- Updated soil NOx extension to include the option to use soil temperature and a parameterization based on Yi Wang et al. (ERL, 2021) instead of the temperature at 2 meters. +- Updated HEMCO standalone to print the dry-run header to the HEMCO log file unit `HcoState%Config%Err%Lun` only if the file is opened +- ReadTheDocs update: Now use GNU 12.2.0 compilers in environment file examples +- Updated `runHEMCO.sh` standalone script: Change partitions, and pipe output to log file + +### Fixed +- Fixed formatting error in `.github/workflows/stale.yml` that caused the Mark Stale Issues action not to run +- Updated to `jinja2==3.1.4` in `docs/requirements.txt` (fixes a security issue) + +### Removed +- Example "Scale (or zero) emissions with a rectangular mask" from ReadTheDocs. This is currently not working. 
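A quick note on the `.github/workflows/stale.yml` fix recorded under "Fixed" above: the `actions/stale` action reads `exempt-issue-labels` as a single comma-separated string, so the bracketed form (which YAML parses as a list) kept the Mark Stale Issues action from running. The snippet below is only an editor's sketch (not part of this changeset); it assumes PyYAML, which the new `download_data.py` also requires, and simply shows how the two forms parse.

```python
# Editor's sketch: contrast how YAML parses the old vs. new setting.
# (Label lists shortened here for brevity.)
import yaml

old_form = "exempt-issue-labels: ['never stale','TODO: Documentation']"
new_form = "exempt-issue-labels: 'never stale,TODO: Documentation'"

print(type(yaml.safe_load(old_form)["exempt-issue-labels"]))  # <class 'list'>
print(type(yaml.safe_load(new_form)["exempt-issue-labels"]))  # <class 'str'>
```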
+ ## [3.9.3] - 2024-08-13 ### Fixed - Added brackets around `exempt-issue-labels` list in `.github/workflows/stale.yml` +- Fixed incorrect pressure handling in HEMCO standalone (see issue #277) ## [3.9.2] - 2024-07-24 ### Changed diff --git a/CMakeLists.txt b/CMakeLists.txt index 332aeee5..d4c6e222 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,7 +1,7 @@ # HEMCO/CMakeLists.txt cmake_minimum_required(VERSION 3.5) -project(HEMCO VERSION 3.9.3 LANGUAGES Fortran) +project(HEMCO VERSION 3.10.0 LANGUAGES Fortran) # Reminder: Make sure to also update version in src/Core/hco_error_mod.F90 #----------------------------------------------------------------------------- diff --git a/docs/requirements.txt b/docs/requirements.txt index 60b39878..71e269de 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -12,4 +12,4 @@ sphinxcontrib-bibtex==2.6.2 sphinx-autobuild==2021.3.14 recommonmark==0.7.1 docutils==0.20.1 -jinja2==3.1.3 +jinja2==3.1.4 diff --git a/docs/source/conf.py b/docs/source/conf.py index 792fe3e3..27abe255 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -23,7 +23,7 @@ author = 'GEOS-Chem Support Team' # The full version, including alpha/beta/rc tags -release = '3.9.3' +release = '3.10.0' # -- General configuration --------------------------------------------------- diff --git a/docs/source/geos-chem-shared-docs b/docs/source/geos-chem-shared-docs index ce3c86ac..14adeb4d 160000 --- a/docs/source/geos-chem-shared-docs +++ b/docs/source/geos-chem-shared-docs @@ -1 +1 @@ -Subproject commit ce3c86accdcd73d3c3a46e41be1fcc09775200dd +Subproject commit 14adeb4d8a9cfbdebd688033e4c22dd034ac105b diff --git a/docs/source/hco-ref-guide/known-bugs.rst b/docs/source/hco-ref-guide/known-bugs.rst index 1fe0fdf8..7c053656 100644 --- a/docs/source/hco-ref-guide/known-bugs.rst +++ b/docs/source/hco-ref-guide/known-bugs.rst @@ -4,18 +4,15 @@ Known bugs and issues ##################### -Please see our `HEMCO issue tracker on Github -`_ for a list of recent -HEMCO bugs and fixes. - -=================== -Current bug reports -=================== - -These `bug reports (listed on the HEMCO issue tracker) -`_ -are currently unresolved. We hope to fix these in -future HEMCO releases. +Please see our HEMCO issue tracker on Github for a list of recent +HEMCO bugs and fixes: + +- `Bugs and issues that have not yet been resolved + `_ +- `Bugs that have been resolved + `_ + +Other known issues are listed below: ===================================== Masks cannot be applied to extensions diff --git a/docs/source/hco-ref-guide/more-examples.rst b/docs/source/hco-ref-guide/more-examples.rst index 03c5d7ae..76a707ad 100644 --- a/docs/source/hco-ref-guide/more-examples.rst +++ b/docs/source/hco-ref-guide/more-examples.rst @@ -90,53 +90,6 @@ These steps can also be used to scale emissions for different regions (e.g. provinces, states) by providing HEMCO with a mask file containing the regions to be scaled. - -.. _cfg-ex-scl-rec-mask: - -Scale (or zero) emissions with a rectangular mask -------------------------------------------------- - -.. important:: - - If you are using HEMCO versions prior to 3.5.0, you may encounter a - bug when trying to follow this example. See Github issue: - https://github.com/geoschem/HEMCO/issues/153 for a workaround. - -Another way to scale all emissions over a country (or set them to -zero) is to apply a rectangular mask. 
- -For example, to set all emissions over Australia and surrounding -islands to zero, add this line to the :ref:`hco-cfg-masks` section of -:ref:`the HEMCO configuration file `: - -.. code-block:: kconfig - - 1010 AUS_MASK 105.0/-46.0/160.0/-10.0 - 2000/1/1/0 C xy 1 1 105/-46/160/–10 - -Here you directly provide the lower left and upper right corner of the -mask region mask instead of a netCDF file: -:literal:`lon1/lat1/lon2/lat2` You can then combine this mask with -a scale factor of zero to eliminate any emissions over that area. - -In :ref:`Base emissions ` - -.. code-block:: kconfig - - 0 HTAP_NO_IND /path/to/HTAP_NO_INDUSTRY.generic.01x01.nc emi_no 2008-2010/1-12/1/0 C xy kg/m2/s NO 1/27/25/501 1/2 4 - -In :ref:`Scale Factors `: - -.. code-block:: kconfig - - 501 SCALE_AUS 0.0 - - - xy unitless 1 1010 - -In :ref:`hco-cfg-masks`: - -.. code-block:: kconfig - - # Defines a rectangular region that should cover AUS + surrounding islands - 1010 AUS_MASK 105.0/-46.0/160.0/-10.0 – 2000/1/1/0 C xy 1 1 105.0/-46.0/160.0/-10.0 - .. _cfg-ex-scl-spc: Scale emissions by species diff --git a/docs/source/hco-ref-guide/version-history.rst b/docs/source/hco-ref-guide/version-history.rst index a5d88d5e..ef93eb37 100644 --- a/docs/source/hco-ref-guide/version-history.rst +++ b/docs/source/hco-ref-guide/version-history.rst @@ -4,6 +4,10 @@ HEMCO version history ##################### -Please see the `CHANGELOG.md file the HEMCO GitHub repository -`_ for a -list of updates by HEMCO version. +For more information about HEMCO versions, please see: + +- `The CHANGELOG.md file + `_ + +- `The Releases page at github.com/geoschem/HEMCO + `_ diff --git a/docs/source/hco-sa-guide/download-code.rst b/docs/source/hco-sa-guide/download-code.rst index a845bc20..281adb12 100644 --- a/docs/source/hco-sa-guide/download-code.rst +++ b/docs/source/hco-sa-guide/download-code.rst @@ -11,15 +11,11 @@ default. .. code-block:: console - $ git clone https://github.com/geoschem/hemco.git HEMCO + $ git clone --recurse-submodules https://github.com/geoschem/hemco.git HEMCO $ cd HEMCO -If you would like a different version of HEMCO you can check out a -different branch. For example, to check out the **dev** branch, type: - -.. code-block:: console - - $ git checkout dev +This will place you on the **main** branch, which contains the latest +stable release of HEMCO. .. tip:: diff --git a/docs/source/hco-sa-guide/download-data.rst b/docs/source/hco-sa-guide/download-data.rst index 99ae1033..6a03365e 100644 --- a/docs/source/hco-sa-guide/download-data.rst +++ b/docs/source/hco-sa-guide/download-data.rst @@ -8,16 +8,27 @@ Before starting a HEMCO standalone simulation, make sure that all of the relevant emissions and meteorology that you will need for your simulation are present on disk. -If you are located at an institution where there are several other -HEMCO and/or `GEOS-Chem `_ users, -then data for HEMCO standalone might already be located in a shared -folder. Ask your sysadmin or IT staff. - -If you are using HEMCO standalone on the Amazon Web Services EC2 -cloud computing platform, then you will have access to an S3 bucket -(:file:`s3://gcgrid/`) with emissions inventories and meteorological data. - -If you still need to download data for your HEMCO standalone -simulation, we recommend using the :program:`bashdatacatalog` tool. -For more information, please see our Supplemental Guide entitled -:ref:`bashdatacatalog`. +.. 
tip:: + + If you are located at an institution where there are several other + HEMCO and/or `GEOS-Chem `_ users, + then data for HEMCO standalone might already be located in a shared + folder. Ask your sysadmin or IT staff. + +The :ref:`GEOS-Chem Input Data ` portal is the main source of +emissions and meteorology data for HEMCO standalone simulations. This data, which is curated by +the GEOS-Chem Support Team at Washington University in St. Louis, is stored in an +Amazon Web Services S3 bucket named `s3://geos-chem +`_. You can easily +download the data from there to your computer cluster or AWS EC2 cloud +instance. + +You can use a couple of different methods to download data. Click on +one of the links below for more information. + +.. toctree:: + :maxdepth: 1 + + hco-sa-dry-run.rst + ../geos-chem-shared-docs/supplemental-guides/bashdatacatalog.rst + hco-sa-globus.rst diff --git a/docs/source/hco-sa-guide/hardware.rst b/docs/source/hco-sa-guide/hardware.rst index 3a707dbf..4ec2c98b 100644 --- a/docs/source/hco-sa-guide/hardware.rst +++ b/docs/source/hco-sa-guide/hardware.rst @@ -1,3 +1,7 @@ +.. |br| raw:: html + +
+ .. _hco-sa-hard: ############################ @@ -17,9 +21,8 @@ Computer system requirements Before you can run HEMCO standalone, you will need to have one the following items. -#. A Unix/Linux based computer system, OR: -#. An account on the `Amazon Web Services cloud computing platform - `_. +#. A Linux based computer system, OR: +#. An account on the Amazon Web Services cloud computing platform. If your institution has computational resources (e.g. a shared computer cluster with many cores, sufficient disk storage and memory), @@ -32,16 +35,27 @@ should consider signing up for access to the Amazon Web Services cloud. Using the cloud has the following advantages: #. You can run HEMCO standalone without having to invest in - local hardware and maintenance personnel. + local hardware and maintenance personnel. |br| + |br| + #. You won't have to download any meteorological fields or emissions data. All of the necessary data input for HEMCO standalone - will be available on the cloud. + will be available on the cloud. |br| + |br| + #. You can initialize your computational environment with all of the - required software (e.g. compilers,libraries, utilities) that you - need for HEMCO standalone. + required software (e.g. compilers, libraries, utilities) that you + need for HEMCO standalone. |br| + |br| + #. Your runs will be 100% reproducible, because you will initialize - your computational environment the same way every time. -#. You will avoid compilation errors due to library incompatibilities. + your computational environment the same way every time. |br| + |br| + +#. You will avoid compilation errors due to library + incompatibilities. |br| + |br| + #. You will be charged for the computational time that you use, and if you download data off the cloud. diff --git a/docs/source/hco-sa-guide/hco-sa-dry-run.rst b/docs/source/hco-sa-guide/hco-sa-dry-run.rst new file mode 100644 index 00000000..9c110f02 --- /dev/null +++ b/docs/source/hco-sa-guide/hco-sa-dry-run.rst @@ -0,0 +1,199 @@ +.. |br| raw:: html + +
+ +.. _hco-sa-dry-run: + +####################################### +Download data with a dry-run simulation +####################################### + +Follow the steps below to perform a HEMCO standalone dry-run simulation: + +========================== +Complete preliminary setup +========================== + +Make sure that you have done the following steps; + +#. :ref:`Downloaded the HEMCO source code ` +#. :ref:`Compiled the HEMCO standalone code ` +#. :ref:`Configured your simulation ` + +.. _dry-run-run-flag: + +============================================= +Run the executable with the ``--dryrun`` flag +============================================= + +Run the HEMCO standalone executable file at the command line with the +:command:`--dryrun` command-line argument as shown below: + +.. code-block:: console + + $ ./hemco_standalone -c HEMCO_sa_Config.rc --dryrun | tee log.dryrun + +The :program:`tee` command will send the output of the dryrun to the +screen as well as to a file named :file:`log.dryrun`. + +The :file:`log.dryrun` file will look somewhat like a regular +HEMCO standalone log file but will also contain a list of data files and +whether each file was found on disk or not. This information will be +used by the :file:`download_data.py` script in the next step. + +You may use whatever name you like for the dry-run output +log file (but we prefer :file:`log.dryrun`). + +============================================================== +Run the :file:`download_data.py` script on the dryrun log file +============================================================== + +Once you have successfully executed a HEMCO standalone dry-run, you +can use the output from the dry-run (contained in the +:file:`log.dryrun` file) to download the data files that the HEMCO +standalone will need to perform the corresponding "production" +simulation. You will download data from the :ref:`GEOS-Chem Input Data +` portal. + +.. important:: + + Before you use the :file:`download_data.py` script, make sure to + initialize a Mamba or Conda environment with the relevant command + shown below: + + .. code-block:: console + + $ mamba activate ENV-NAME # If using Mamba + + $ conda activate ENV-NAME # If using Conda + + Here :literal:`ENV-NAME` is the name of your environment. + + Also make sure that you have installed the PyYAML module to your + conda environment. PyYAML will allow the :file:`download_data.py` + script to read certain configurable settings from a YAML file in + your run directory. + + The Python environment for GCPy has all of the proper packages + that you need to download data from a dry-run simulation. For + more information, please see `gcpy.readthedocs.io + `_. + +Navigate to your HEMCO run directory where you executed the dry-run +and type. + +.. code-block:: console + + $ ./download_data.py log.dryrun PORTAL-NAME + +where: + +- :file:`download_data.py` is the dry-run data download program + (written in Python). It is included in each :ref:`HEMCO standalone + run directory ` that you create. |br| + |br| + +- :file:`log.dryrun` is the log file from your HEMCO standalone + dry-run simulation. |br| + |br| + +- :literal:`PORTAL-NAME` specifies the data portal that you wish + to download from. Allowed values are: + + .. 
list-table:: Allowed values for the ``PORTAL-NAME`` argument + to ``download_data.py`` + :header-rows: 1 + :align: center + + * - Value + - Downloads from portal + - With this command + - Via this method + * - geoschem+aws + - :ref:`GEOS-Chem Input Data ` + - :command:`aws s3 cp` + - AWS CLI + * - geoschem+http + - :ref:`GEOS-Chem Input Data ` + - :command:`wget` + - HTTP + * - rochester + - :ref:`GCAP 2.0 met data @ Rochester ` + - :command:`wget` + - HTTP + +For example, to download data from the :ref:`GEOS-Chem Input Data +` portal using the AWS CLI download (which is faster than +HTTP download), use this command: + +.. code-block:: console + + $ ./download_data.py log.dryrun geoschem+aws + +.. note:: + + You must have the `AWS CLI (command-line interface) + `_ software installed on your system + in order to use the :literal:`geoschem+aws` or + :literal:`nested+aws` options in the table listed above. + +The :file:`download_data.py` program will generate a **log of +unique data files** (i.e. with all duplicate listings removed), which +looks similar to this: + +.. code-block:: text + + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !!! LIST OF (UNIQUE) FILES REQUIRED FOR THE SIMULATION + !!! Start Date : 20190701 000000 + !!! End Date : 20190701 010000 + !!! Simulation : fullchem + !!! Meteorology : MERRA2 + !!! Grid Resolution : 4.0x5.0 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + ./HEMCO_Config.rc + ./HEMCO_Config.rc.gmao_metfields + ./HEMCO_Diagn.rc + ./HISTORY.rc + ./Restarts/GEOSChem.Restart.20190701_0000z.nc4 --> /home/ubuntu/ExtData/GEOSCHEM_RESTARTS/GC_14.5.0/GEOSChem.Restart.fullchem.20190701_0000z.nc4 + ./Restarts/HEMCO_restart.201907010000.nc + ./geoschem_config.yml + /path/to/ExtData/CHEM_INPUTS/CLOUD_J/v2024-09/FJX_j2j.dat + /path/to/ExtData/CHEM_INPUTS/CLOUD_J/v2024-09/FJX_scat-aer.dat + /path/to/ExtData/CHEM_INPUTS/CLOUD_J/v2024-09/FJX_scat-cld.dat + /path/to/ExtData/CHEM_INPUTS/CLOUD_J/v2024-09/FJX_scat-ssa.dat + /path/to/ExtData/CHEM_INPUTS/CLOUD_J/v2024-09/FJX_spec.dat + /path/to/ExtData/CHEM_INPUTS/FastJ_201204/fastj.jv_atms_dat.nc + /path/to/ExtData/CHEM_INPUTS/Linoz_200910/Linoz_March2007.dat + /path/to/ExtData/CHEM_INPUTS/Olson_Land_Map_201203/Olson_2001_Drydep_Inputs.nc + /path/to/ExtData/CHEM_INPUTS/UCX_201403/NoonTime/Grid4x5/InitCFC_JN2O_01.dat + + ... etc ... + +The name of this "unique" log file will be the same as the log file +with dryrun output, with :file:`.unique` appended. In our above +example, we passed :file:`log.dryrun` to :file:`download_data.py`, so +the "unique" log file will be named :file:`log.dryrun.unique`. This +"unique" log file can be very useful for documentation purposes. + +============================================= +Skip download, but create log of unique files +============================================= + +If you wish to only produce the log of unique data files without +downloading any data, then type the following command from within your +HEMCO-standalone run directory: + +.. code-block:: console + + $ ./download_data.py log.dryrun skip-download + +or for short: + +.. code-block:: console + + $ ./download_data.py log.dryrun skip + +This can be useful if you already have the necessary data downloaded to +your system but wish to create the log of unique files for documentation +purposes (such as for benchmark simulations, etc.). 
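As a companion to the dry-run workflow documented above, here is a minimal Python sketch (an editor's illustration, not part of this changeset) that tallies the found and missing files in a dry-run log before you hand it to :file:`download_data.py`. It reuses the same string matches (``: OPENING``, ``: READING``, ``FILE NOT FOUND``) that :file:`download_data.py` applies further down in this diff; the script name :file:`summarize_dryrun.py` and the log name :file:`log.dryrun` are illustrative only.

.. code-block:: python

   #!/usr/bin/env python3
   """Tally found vs. missing files in a HEMCO standalone dry-run log."""
   import sys

   def summarize_dryrun(log_path):
       """Return (found, missing) sets of file paths from a dry-run log."""
       found, missing = set(), set()
       with open(log_path, "r", encoding="UTF-8") as ifile:
           for line in ifile:
               upper = line.upper()
               # Data files that HEMCO located on disk
               if ": OPENING" in upper or ": READING" in upper:
                   found.add(line.split()[-1])
               # Data files that HEMCO could not locate
               elif "FILE NOT FOUND" in upper:
                   missing.add(line.split()[-1])
       return found, missing

   if __name__ == "__main__":
       # Example usage: ./summarize_dryrun.py log.dryrun
       found, missing = summarize_dryrun(sys.argv[1])
       print(f"{len(found)} files found, {len(missing)} files missing")
       for path in sorted(missing):
           print(f"MISSING: {path}")

Running such a script on :file:`log.dryrun` before and after the data download is a quick way to confirm that no required files are still missing.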
diff --git a/docs/source/hco-sa-guide/hco-sa-globus.rst b/docs/source/hco-sa-guide/hco-sa-globus.rst new file mode 100644 index 00000000..f4d21774 --- /dev/null +++ b/docs/source/hco-sa-guide/hco-sa-globus.rst @@ -0,0 +1,13 @@ +.. _hco-sa-globus: + +######################### +Download data with Globus +######################### + +Many institutions use the `Globus `_ file transfer +utility, which has much higher data download speeds than normal SSH, +FTP, or HTTP connections. + +If your institution uses Globus, you can download data from the +**GEOS-Chem Data (WashU)** endpoint to your computer system. Ask your +IT support staff if Globus is supported at your institution. diff --git a/docs/source/hco-sa-guide/login-env.rst b/docs/source/hco-sa-guide/login-env.rst index 8c6710f6..518e284a 100644 --- a/docs/source/hco-sa-guide/login-env.rst +++ b/docs/source/hco-sa-guide/login-env.rst @@ -4,17 +4,18 @@ Configure your login environment ################################ +.. tip:: + + You may :ref:`skip ahead ` if you will be using + :program:`HEMCO standalone` on an Amazon EC2 cloud instance. + When you initialize the EC2 instance with one of the pre-configured Amazon + Machine Images (AMIs), all of the required software libraries will be + automatically loaded. + In this chapter, you will learn how to load the software packages that you have created into your computational environment. This will need to be done each time you log in to your computer system. -.. tip:: - - You may skip this section if you plan on using HEMCO standalone on - an Amazon EC2 cloud instance. When you initialize the EC2 instance - with one of the pre-configured Amazon Machine Images (AMIs) all of - the required software libraries will be automatically loaded. - An environment file does the following: 1. Loads software libraries into your login environment. This is @@ -40,36 +41,64 @@ or :file:`~/.bash_aliases` startup scripts. .. _hco-sa-login-gnu: ================================================ -Sample environment file for GNU 10.2.0 compilers +Sample environment file for GNU 12.2.0 compilers ================================================ -Below is a sample environment file from the Harvard Cannon computer -cluster. This file will load software libraries built with the GNU -10.2.0 compilers. +Below is a sample environment file (based on an environment file for +the Harvard Cannon computer cluster). This file will load software +libraries built with the `GNU 12.2.0 compilers +`_. + +.. note:: + + The environment file shown below assumes that required software + packages for :program:`HEMCO standalone` are available as + pre-built modules. If your computer system does not have these + packages pre-installed, you can build them with Spack. Please see + our :ref:`spackguide` supplemental guide for detailed instructions. Save the code below (with any appropriate modifications for your own -computer system) to a file named :file:`~/gnu10.env`. +computer system) to a file named :file:`~/gnu12.env`. .. code-block:: bash + ############################################################################### + # + # Environment file for HEMCO + GNU Compiler Collection 12.2.0 + # + ############################################################################### + + # Display message (if we are in a terminal window) + if [[ $- = *i* ]] ; then + echo "Loading modules for HEMCO, please wait ..." 
+ fi + #============================================================================== - # Load software packages (EDIT AS NEEDED) + # Unload all previously-unloaded software #============================================================================== - # Unload all modules first + # Unload packages loaded with "module load" module purge - # Load modules - module load gcc/10.2.0-fasrc01 # gcc / g++ / gfortran - module load openmpi/4.1.0-fasrc01 # MPI - module load netcdf-c/4.8.0-fasrc01 # netcdf-c - module load netcdf-fortran/4.5.3-fasrc01 # netcdf-fortran + #============================================================================== + # Load software packages for GNU 12.2.0 + #============================================================================== + if [[ $- = *i* ]] ; then + echo "... Loading FASRC-built software, please wait ..." + fi + + # Pre-built modules needed for HEMCO + # (NOTE: These may be named differently on your system) + module load gcc/12.2.0-fasrc01 # gcc / g++ / gfortran + module load openmpi/4.1.4-fasrc01 # MPI + module load netcdf-c/4.9.2-fasrc01 # netcdf-c + module load netcdf-fortran/4.6.0-fasrc02 # netcdf-fortran module load flex/2.6.4-fasrc01 # Flex lexer (needed for KPP) module load cmake/3.25.2-fasrc01 # CMake (needed to compile) #============================================================================== # Environment variables and related settings - # (NOTE: Lmod will define _HOME variables for each loaded module + # (NOTE: Lmod will define _HOME variables for each loaded module) #============================================================================== # Make all files world-readable by default @@ -94,13 +123,13 @@ computer system) to a file named :file:`~/gnu10.env`. # netCDF if [[ "x${NETCDF_HOME}" == "x" ]]; then - export NETCDF_HOME="${NETCDF_C_HOME}" + export NETCDF_HOME="${NETCDF_C_HOME}" fi export NETCDF_C_ROOT="${NETCDF_HOME}" - export NETCDF_FORTRAN_ROOT="${NETCDF_FORTRAN_HOME}" + export NETCDF_FORTRAN_ROOT=${NETCDF_FORTRAN_HOME} # KPP 3.0.0+ - export KPP_FLEX_LIB_DIR="${FLEX_HOME}/lib64" + export KPP_FLEX_LIB_DIR=${FLEX_HOME}/lib64 #============================================================================== # Set limits @@ -114,8 +143,23 @@ computer system) to a file named :file:`~/gnu10.env`. #============================================================================== # Print information #============================================================================== + module list + echo "" + echo "Environment:" + echo "" + echo "CC : ${CC}" + echo "CXX : ${CXX}" + echo "FC : ${FC}" + echo "KPP_FLEX_LIB_DIR : ${KPP_FLEX_LIB_DIR}" + echo "MPI_HOME : ${MPI_HOME}" + echo "NETCDF_HOME : ${NETCDF_HOME}" + echo "NETCDF_FORTRAN_HOME : ${NETCDF_FORTRAN_HOME}" + echo "OMP_NUM_THREADS : ${OMP_NUM_THREADS}" + echo "" + echo "Done sourcing ${BASH_SOURCE[0]}" + .. tip:: Ask your sysadmin how to load software libraries. If you are using @@ -123,11 +167,14 @@ computer system) to a file named :file:`~/gnu10.env`. be a software module system installed, with commands similar to those listed above. -Then you can activate these seetings from the command line by typing: +You may also place the above command within your HEMCO standalone run +script, which will be discussed in a subsequent chapter. + +To activate the settings contained in the environment file, type: .. code-block:: console - $ source ~/gnu10.env + $ . ~/gnu12.env .. _hco-sa-login-intel: @@ -144,9 +191,11 @@ system) into a file named :file:`~/intel23.env`. .. 
code-block:: bash + - #============================================================================== - # Load software packages (EDIT AS NEEDED) - #============================================================================== + ############################################################################### + # + # Environment file for HEMCO + Intel 2023 compilers + # + ############################################################################### # Unload all modules first module purge @@ -208,6 +257,21 @@ system) into a file named :file:`~/intel23.env`. module list + echo "" + echo "Environment:" + echo "" + echo "CC : ${CC}" + echo "CXX : ${CXX}" + echo "FC : ${FC}" + echo "KPP_FLEX_LIB_DIR : ${KPP_FLEX_LIB_DIR}" + echo "MPI_HOME : ${MPI_HOME}" + echo "NETCDF_HOME : ${NETCDF_HOME}" + echo "NETCDF_FORTRAN_HOME : ${NETCDF_FORTRAN_HOME}" + echo "OMP_NUM_THREADS : ${OMP_NUM_THREADS}" + echo "" + echo "Done sourcing ${BASH_SOURCE[0]}" + + .. tip:: Ask your sysadmin how to load software libraries. If you @@ -215,11 +279,11 @@ system) into a file named :file:`~/intel23.env`. are there will be a software module system installed, with commands similar to those listed above. -Then you can activate these seetings from the command line by typing: +To activate the settings contained in the environment file, type: .. code-block:: console - $ source intel23.env + $ . intel23.env .. tip:: @@ -253,7 +317,7 @@ These environment variables should be defined in your .. note:: - HEMCOc only requires the Fortran compiler. But you will + HEMCO only requires the Fortran compiler. But you will also need the C and C++ compilers if you plan to build other software packages or :ref:`install libraries manually `. @@ -310,8 +374,8 @@ control the OpenMP parallelization settings: ulimit -s unlimited export OMP_STACKSIZE=500m - The :command:`ulimit -s unlimited` will tell the bash shell to use the - maximum amount of stack memory that is available. + The :command:`ulimit -s unlimited` command will tell the bash shell + to use the maximum amount of stack memory that is available. The environment variable :envvar:`OMP_STACKSIZE` must also be set to a very large number. In this example, we are nominally requesting 500 MB of diff --git a/docs/source/hco-sa-guide/software.rst b/docs/source/hco-sa-guide/software.rst index 4bb876f8..a8ad0057 100644 --- a/docs/source/hco-sa-guide/software.rst +++ b/docs/source/hco-sa-guide/software.rst @@ -18,7 +18,8 @@ installed on your system in order to use :program:`HEMCO standalone`. - If you plan to run HEMCO standalone on the Amazon Web services cloud, then all of these libraries will be included with the Amazon - Machine Image (AMI) that you will use to start your cloud instance. |br| + Machine Image (AMI) that you will use to start your cloud instance. + You may skip ahead to the :ref:`hco-sa-download` chapter. 
|br| |br| - If your computer cluster has none of these libraries installed, then diff --git a/docs/source/index.rst b/docs/source/index.rst index f9a7f688..86044c39 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -68,9 +68,9 @@ HEMCO is given in :cite:t:`Keller_et_al._2014` and geos-chem-shared-docs/supplemental-guides/load-libraries-guide.rst geos-chem-shared-docs/supplemental-guides/spack-guide.rst - geos-chem-shared-docs/supplemental-guides/customize-guide.rst geos-chem-shared-docs/supplemental-guides/error-guide.rst geos-chem-shared-docs/supplemental-guides/debug-guide.rst + geos-chem-shared-docs/doc/gcid-portal-overview.rst geos-chem-shared-docs/supplemental-guides/bashdatacatalog.rst geos-chem-shared-docs/supplemental-guides/parallel-guide.rst geos-chem-shared-docs/supplemental-guides/netcdf-guide.rst diff --git a/run/cleanRunDir.sh b/run/cleanRunDir.sh new file mode 100755 index 00000000..6629ac42 --- /dev/null +++ b/run/cleanRunDir.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +#============================================================================ +# cleanRunDir.sh: Removes files created by HEMCO from a run directory +# +# Usage: +# ------ +# $ ./cleanRunDir.sh # Removes model output files in the run directory. +# # Also prompts the user before removing diagnostic +# # output files in OutputDir/. +# +# $ ./cleanRunDir.sh 1 # Removes model output files in the run directory, +# # but will remove diagnostic output files without +# # prompting first. USE WITH CAUTION! +#============================================================================ + +# Clean model output files in the run directory +rm -fv *~ +rm -fv HEMCO.log +rm -fv log* +rm -fv slurm-* +rm -fv core.* +rm -fv fort.* + +#---------------------------------------------------------------------------- +# Clean data files in OutputDir. +# These are netCDF files (*.nc) and KPP standalone interface files (*.txt). 
+#---------------------------------------------------------------------------- +if [[ "x${1}" == "x" ]]; then # User confirmation required + rm -Iv ./OutputDir/*.nc* + rm -Iv ./OutputDir/*.txt +else # User Confirmation not required + rm -fv ./OutputDir/*.nc* + rm -fv ./OutputDir/*.txt* +fi + +#--------------------------------------------------------------------------- +# Give instruction to reset start date if using GCHP +#--------------------------------------------------------------------------- +echo "Reset simulation start date in cap_restart if using GCHP" diff --git a/run/createRunDir.sh b/run/createRunDir.sh index 84af3edc..bb4ddc73 100755 --- a/run/createRunDir.sh +++ b/run/createRunDir.sh @@ -306,6 +306,7 @@ cp ./HEMCO_sa_Spec.rc ${rundir} cp ./${grid_file} ${rundir} cp ./runHEMCO.sh ${rundir} cp ./README ${rundir} +cp ./download_data* ${rundir} cp ${hco_config_dir}/HEMCO_Config.* ${rundir} if [[ -f ${hco_config_dir}/HEMCO_Diagn.rc ]]; then cp ${hco_config_dir}/HEMCO_Diagn.rc ${rundir} diff --git a/run/download_data.py b/run/download_data.py new file mode 100755 index 00000000..54ca1729 --- /dev/null +++ b/run/download_data.py @@ -0,0 +1,471 @@ +#!/usr/bin/env python3 + +""" +Description: +------------ +This Python script (assumes Python3) reads a GEOS-Chem or +HEMCO-standalone log file containing dry-run output and does +the following: + + (1) Creates a list of unique files that are required for the + GEOS-Chem or HEMCO-standalone simulation; + + (2) Creates a bash script to download missing files from the AWS + s3://gcgrid bucket or from a specified server; + + (3) Executes the bash script to download the necessary data; + + (4) Removes the bash script upon successful download. + + +Remarks: +-------- + (1) This script only requires the "os", "sys", "subprocess", and + PyYaml packages. + + (2) Jiawei Zhuang found that it is much faster to issue aws s3 cp + commands from a bash script than a Python script. Therefore, + in this routine we create a bash script with all of the + download commands that will be executed by the main routine. +""" + +# Imports +import os +import sys +import subprocess +import yaml + +# Exit with error if we are not using Python3 +assert sys.version_info.major >= 3, \ +"ERROR: Python 3 is required to run download_data.py!" + +# Define global variables +DATA_DOWNLOAD_SCRIPT = "./auto_generated_download_script.sh" +CONFIG_FILE = "./HEMCO_sa_Config.rc" +TIME_FILE = "./HEMCO_sa_Time.rc" + +def read_config_file( + config_file, + to_str=False +): + """ + Reads configuration information from a YAML file. + + Args: + ----- + config_file : str + The configuration file in YAML format + to_str : bool + Set this to True if you wish to return the data in the YAML + file as strings, or False otherwise. + + Returns: + -------- + config : dict + Dictionary with the contents of the YAML file + """ + try: + with open(config_file, encoding="UTF-8") as stream: + if to_str: + return yaml.load(stream, Loader=yaml.loader.BaseLoader) + return yaml.load(stream, Loader=yaml.loader.SafeLoader) + except FileNotFoundError as err: + msg = f"Error reading configuration in {config_file}: {err}" + raise FileNotFoundError(msg) from err + + +def extract_pathnames_from_log( + args +): + """ + Returns a list of pathnames from a GEOS-Chem log file. + + Args: + ----- + args : dict + Contains output from function parse_args. + + Returns: + -------- + paths : dict + paths["comments"]: Dry-run comment lines. + paths["found"] : List of file paths found on disk. 
+ paths["missing"]: List of file paths that are missing. + paths["local_prefix"]: Local data directory root. + + Author: + ------- + Jiawei Zhuang (jiaweizhuang@g.harvard.edu) + Modified by Bob Yantosca (yantosca@seas.harvard.edu) + """ + + # Initialization + comments = ["!"*79, + "!!! LIST OF (UNIQUE) FILES REQUIRED FOR THE SIMULATION"] + data_found = set() + data_missing = set() + dryrun_log = args["dryrun_log"] + + # Open file (or die with error) + with open(dryrun_log, "r", encoding="UTF-8") as ifile: + + # Read data from the file line by line. + # Add file paths to the data_list set. + line = ifile.readline() + + while line: + + # Convert line to uppercase for string match + upcaseline = line.upper() + + # Search for data paths that have been found + if (": OPENING" in upcaseline) or (": READING" in upcaseline): + data_found.add(line.split()[-1]) + + # Search for data paths that are missing + elif "FILE NOT FOUND" in upcaseline: + data_missing.add(line.split()[-1]) + + # Search for certain dry-run comment strings + # (and make sure to prevent duplicates) + elif ("!!! STA" in upcaseline) or ("!!! END" in upcaseline) or \ + ("!!! SIM" in upcaseline) or ("!!! MET" in upcaseline) or \ + ("!!! GRI" in upcaseline): + if line.rstrip() not in comments: + comments.append(line.rstrip()) + + else: + pass + + # Read next line + line = ifile.readline() + + # Add another line to the comment list + comments.append("!"*79) + + # Convert sets to lists and sort in alphabetical order + found = sorted(list(data_found)) + missing = sorted(list(data_missing)) + + # Find the local data directory prefix (path to ExtData) + local_prefix = "" + for path in found + missing: + if "ExtData" in path: + index = path.find("ExtData") + local_prefix = path[:index] + if "ExtData" not in local_prefix: + local_prefix = os.path.join(local_prefix, "ExtData") + break + + # Exit if the local path does not contain ExtData + if len(local_prefix) == 0: + msg = \ + "Could not locate the ExtData folder in your local disk space!" + raise ValueError(msg) + + # Close file and return + # The "sorted" command will return unique values + ifile.close() + + paths = { + "comments": comments, + "found": found, + "missing": missing, + "local_prefix": local_prefix + } + return paths + + +def get_run_info(): + """ + Searches through the geoschem_config.yml file for GEOS-Chem + simulation parameters. + + Returns: + ------- + run_info : dict + Contains the GEOS-Chem run parameters: start_date, + start_time, end_date, end_time, met, grid, and sim. 
+ """ + + # Create dictionary with GEOS-Chem simulation parameters + # NOTE: Numbers are returned as strings, and need to be converted + run_info = {} + + # Read start & end datetimes + config = read_config_file(TIME_FILE, to_str=True) + temp = config["START"].replace("-","").replace(":","").split() + run_info["start_date"] = int(temp[0]) + run_info["start_time"] = int(temp[1]) + temp = config["END"].replace("-","").replace(":","").split() + run_info["end_date"] = int(temp[0]) + run_info["end_time"] = int(temp[1]) + + # Get root data dir, met field, and resolution + with open(CONFIG_FILE, "r", encoding="UTF-8") as ifile: + for line in ifile: + line = line.strip() + if "END SECTION SETTINGS" in line: + break + if "ROOT:" in line: + run_info["root_data_dir"] = line.split(":")[-1].strip(" ") + if "MET:" in line: + run_info["met_field"] = line.split(":")[-1].strip(" ") + if "RES:" in line: + run_info["resolution"] = line.split(":")[-1].strip(" ") + run_info["grid"] = run_info["resolution"] + + return run_info + + +def replace_entry_in_list( + the_list, + old_entry, + new_entry +): + """ + Replaces a string entry in a list with a new entry. + + Args: + ----- + the_list : list of str + The list + old_entry : (str + Entry to replace + new_entry : str + Replacement text + + Returns: + -------- + the_list : list of str + The modified list + """ + return list(map(lambda x: x.replace(old_entry, new_entry), the_list)) + + +def write_unique_paths( + paths, + unique_log +): + """ + Writes unique data paths from dry-run output to a file. + + Args: + ----- + paths : dict + Contains output from function extract_pathnames_from_log. + + unique_log : str + Log file that will hold unique data paths. + """ + combined_paths = paths["found"] + paths["missing"] + combined_paths.sort() + + try: + with open(unique_log, "w", encoding="UTF-8") as ofile: + for comment in paths["comments"]: + print(comment, file=ofile) + for path in combined_paths: + print(path, file=ofile) + for comment in paths["comments"]: + print(comment, file=ofile) + ofile.close() + print(f"Log with unique file paths written to: {unique_log}") + except RuntimeError as exc: + raise RuntimeError(f"Could not write {unique_log}") from exc + + +def create_download_script( + paths, + args +): + """ + Creates a data download script to obtain missing files + from the s3://gcgrid bucket on the AWS cloud or from a + specified server. + + Args: + ----- + paths : dict + Contains output from function extract_pathnames_from_log. + args : dict + Contains output from function parse_args. 
+ """ + + # Extract portal parameters + portal_name = args["portal"] + portal = args["config"]["portals"][portal_name] + is_s3_bucket = portal["s3_bucket"] + remote_root = portal["remote"] + quote = portal["quote"] + cmd_prefix = portal["command"] + if "@PATH@" in cmd_prefix: + cmd_prefix = cmd_prefix.replace("@PATH@", paths["local_prefix"]) + + # Create the data download script + with open(DATA_DOWNLOAD_SCRIPT, "w", encoding="UTF-8") as ofile: + + # Write shebang line to script + print("#!/bin/bash\n", file=ofile) + print("# This script was generated by download_data.py\n", file=ofile) + + # Write download commands for only the missing data files + for path in paths["missing"]: + + # We do not download HEMCO restart files + if "HEMCO_restart" in path: + continue + + # Write path + index = path.find("ExtData") + 7 + local_dir = os.path.dirname(path) + remote_path = remote_root + path[index:] + cmd = cmd_prefix + quote + remote_path + quote + if is_s3_bucket: + cmd += " " + local_dir + "/" + print(cmd, file=ofile) + print(file=ofile) + + # Close file and make it executable + ofile.close() + os.chmod(DATA_DOWNLOAD_SCRIPT, 0o755) + +def download_the_data( + args +): + """ + Downloads GEOS-Chem data files from the AWS s3://gcgrid bucket + or from a specified server. + + Args: + ----- + args : dict + Output of runction parse_args. + """ + + # Get information about the run + run_info = get_run_info() + + # Get a unique list of data paths, both found and missing: + # Expand the data paths to include links to restart files + paths = extract_pathnames_from_log(args) + + # Write a list of unique file paths + write_unique_paths(paths, args["dryrun_log"] + ".unique") + + # Exit without downloading if skip-download flag was specified + if args["skip_download"]: + return + + # Print a message + if len(args["portal"]) > 0: + print(f"Downloading data from {args['portal']}") + + # Create script to download missing files from AWS S3 + create_download_script(paths, args) + + #### DEBUG: Uncomment this if you wish to see the download script + #if args["skip_download"]: + # return + + # Run the data download script and return the status + # Remove the file afterwards + status = subprocess.call(DATA_DOWNLOAD_SCRIPT) + os.remove(DATA_DOWNLOAD_SCRIPT) + + # Raise an exception if the data was not successfully downloaded + if status != 0: + msg = f"Error downloading data from {args['portal']}" + raise RuntimeError(msg) + + +def parse_args(): + """ + Reads global settings from the download_data.yml configuration file. + Also parses command-line arguments and returns a dictionary + containing all of these settings. + + Returns: + -------- + args : dict + args["config"] : Dict with global settings from download_data.yml + args["dryrun_log"] Name of the GEOS-Chem dry-run log file + args["portal"]: Name of the remote portal for download + args["skip_download"]: Are we skipping the download? 
(T/F) + """ + dryrun_log = None + dryrun_found = False + portal_found = False + portal_remote = None + skip_download = False + skip_found = False + + # Read the YAML configuration file + config = read_config_file("download_data.yml") + + # Get a list of portal names + short names + portal_list = list(config["portals"].keys()) + short_name_list = [] + for mir in portal_list: + short_name_list.append(config["portals"][mir]["short_name"]) + + # Parse command-line arguments (argument 0 is the program name) + for i in range(1, len(sys.argv)): + arg = sys.argv[i].lower() + arg = arg.lstrip('-') + + if not dryrun_found: + dryrun_log = arg + dryrun_found = True + continue + + if not portal_found: + for mir in portal_list: + portal = mir.lower() + short_name = config["portals"][mir]["short_name"].lower() + if arg in portal or arg in short_name: + portal_remote = portal + portal_found = True + continue + + if not skip_found: + if "skip" in arg: + skip_download = True + skip_found = True + continue + + + if dryrun_log is None: + msg = "The dryrun log file was not supplied! Exiting ..." + raise ValueError(msg) + + if portal_remote is None and not skip_download: + msg = "Portal name missing or invalid! Exiting ..." + raise ValueError(msg) + + args = { + "config": config, + "dryrun_log": dryrun_log, + "portal": portal_remote, + "skip_download": skip_download + } + return args + + +def main(): + """ + Main program. Gets command-line arguments and calls function + download_the_data to initiate a data-downloading process. + + Calling sequence: + ----------------- + ./download_data.py log PORTAL-NAME + ./download_data.py log -skip-download # Print unique log & exit + """ + + # Download the data files from the remote server + download_the_data(parse_args()) + + +if __name__ == "__main__": + main() diff --git a/run/download_data.yml b/run/download_data.yml new file mode 100644 index 00000000..71db29d2 --- /dev/null +++ b/run/download_data.yml @@ -0,0 +1,51 @@ +--- +# +# Configuration file for the download_data.py script. +# You should not have to modify this file unless a new data portal +# comes online, or the default restart files are updated. +# +# +# GEOS-Chem data portals +portals: + + # GEOS-Chem Input Data portal, download via AWS CLI + geoschem+aws: + short_name: ga + s3_bucket: True + remote: s3://geos-chem + command: 'aws s3 cp ' + quote: "" + + # GEOS-Chem Input Data portal, download via HTTP/wget + # NOTE: Use geoschemdata.wustl.edu as a backup + geoschem+http: + short_name: gh + s3_bucket: False + remote: https://geos-chem.s3-us-west-2.amazonaws.com + #remote: http://geoschemdata.wustl.edu/ExtData + command: 'wget -r -np -nH -R "*.html" -N -P @PATH@ ' + quote: '"' + + # GEOS-Chem Nested Input Data portal, download via AWS CLI + nested+aws: + short_name: na + s3_bucket: True + remote: s3://gcgrid + command: 'aws s3 cp ' + quote: "" + + # GEOS-Chem Nested Input Data portal, download via HTTP/wget + nested+http: + short_name: nh + s3_bucket: False + remote: https://gcgrid.s3.amazonaws.com/ + command: 'aws s3 cp ' + quote: "" + + # GCAP 2.0 @ U. 
Rochester data portal, download via HTTP/wget + rochester: + short_name: ur + s3_bucket: False + remote: http://atmos.earth.rochester.edu/input/gc/ExtData + command: 'wget -r -np -nH -R "*.html" -N --cut-dirs=2 -P @PATH@ ' + quote: '"' diff --git a/run/runHEMCO.sh b/run/runHEMCO.sh index 7dab11ab..9a25b1a1 100755 --- a/run/runHEMCO.sh +++ b/run/runHEMCO.sh @@ -3,7 +3,7 @@ #SBATCH -c 8 #SBATCH -N 1 #SBATCH -t 0-12:00 -#SBATCH -p huce_intel +#SBATCH -p sapphire,huce_cascade,seas_compute,shared #SBATCH --mem=15000 #SBATCH --mail-type=END @@ -12,7 +12,7 @@ export OMP_NUM_THREADS=$SLURM_CPUS_PER_TASK # Run GEOS_Chem. The "time" command will return CPU and wall times. # Stdout and stderr will be directed to the log files specified above. -time ./hemco_standalone -c HEMCO_sa_Config.rc +time ./hemco_standalone -c HEMCO_sa_Config.rc > HEMCO_sa.log 2>&1 # Exit normally exit 0 diff --git a/src/Core/hco_error_mod.F90 b/src/Core/hco_error_mod.F90 index cc75c328..70dac754 100644 --- a/src/Core/hco_error_mod.F90 +++ b/src/Core/hco_error_mod.F90 @@ -105,7 +105,7 @@ MODULE HCO_Error_Mod #endif ! HEMCO version number. - CHARACTER(LEN=12), PARAMETER, PUBLIC :: HCO_VERSION = '3.9.3' + CHARACTER(LEN=12), PARAMETER, PUBLIC :: HCO_VERSION = '3.10.0' INTERFACE HCO_Error MODULE PROCEDURE HCO_ErrorNoErr diff --git a/src/Core/hco_geotools_mod.F90 b/src/Core/hco_geotools_mod.F90 index b95b392e..455c1cd1 100644 --- a/src/Core/hco_geotools_mod.F90 +++ b/src/Core/hco_geotools_mod.F90 @@ -814,6 +814,7 @@ SUBROUTINE HCO_CalcVertGrid ( HcoState, PSFC, ZSFC, TK, BXHEIGHT, PEDGE, RC ) LOGICAL, SAVE :: EVAL_TK = .TRUE. LOGICAL, SAVE :: EVAL_PEDGE = .TRUE. LOGICAL, SAVE :: EVAL_BXHEIGHT = .TRUE. + LOGICAL, SAVE :: DO_SCALE_PSFC = .FALSE. !------------------------------- ! HCO_CalcVertGrid begins here @@ -1178,11 +1179,26 @@ SUBROUTINE HCO_CalcVertGrid ( HcoState, PSFC, ZSFC, TK, BXHEIGHT, PEDGE, RC ) FoundPSFC = .TRUE. ENDIF - ! Set PEDGE + ! Test if we need to convert surface pressure (PSFC) from hPa to + ! Pa. Only do this test on the first timestep for efficiency. + IF ( FIRST ) THEN + DO_SCALE_PSFC = ( MINVAL( HcoState%Grid%PSFC%Val) < 10000.0_hp ) + ENDIF + + ! Convert PSFC from hPa to Pa if necessary (on each timestep). + IF ( DO_SCALE_PSFC ) THEN + HcoState%Grid%PSFC%Val = HcoState%Grid%PSFC%Val * 100.0_hp + ENDIF + + ! If we are using HEMCO in a CTM or ESM, then PEDGE will have + ! been passed to HEMCO as an input. If PEDGE has not been passed + ! (e.g. HEMCO standalone), then compute it here using the surface + ! pressure PSFC and the Ap and Bp hybrid grid parameters. IF ( .NOT. FoundPEDGE ) THEN !$OMP PARALLEL DO & !$OMP DEFAULT( SHARED ) & - !$OMP PRIVATE( I, J, L ) + !$OMP PRIVATE( I, J, L ) & + !$OMP COLLAPSE( 3 ) DO L = 1, HcoState%NZ+1 DO J = 1, HcoState%NY DO I = 1, HcoState%NX diff --git a/src/Extensions/hcox_finn_mod.F90 b/src/Extensions/hcox_finn_mod.F90 index 2faf8031..4d13a4e3 100644 --- a/src/Extensions/hcox_finn_mod.F90 +++ b/src/Extensions/hcox_finn_mod.F90 @@ -792,7 +792,7 @@ SUBROUTINE HCOX_FINN_Init( HcoState, ExtName, ExtState, RC ) ! Species listed in VOC speciation table Inst%FINN_SPEC_NAME(11) = 'ACET' - Inst%FINN_SPEC_NAME(12) = 'ACTA' ! Not currently emitted by BB in GC + Inst%FINN_SPEC_NAME(12) = 'ACTA' Inst%FINN_SPEC_NAME(13) = 'ALD2' Inst%FINN_SPEC_NAME(14) = 'ALK4' Inst%FINN_SPEC_NAME(15) = 'APINE' ! 
Currently lumped into MTPA @@ -803,39 +803,39 @@ SUBROUTINE HCOX_FINN_Init( HcoState, ExtName, ExtState, RC ) Inst%FINN_SPEC_NAME(20) = 'C2H4' Inst%FINN_SPEC_NAME(21) = 'C2H6' Inst%FINN_SPEC_NAME(22) = 'C3H8' - Inst%FINN_SPEC_NAME(23) = 'CARENE' ! Currently lumped into MTPA + Inst%FINN_SPEC_NAME(23) = 'CARENE' ! Currently lumped into MTPO Inst%FINN_SPEC_NAME(24) = 'CH2Br2' Inst%FINN_SPEC_NAME(25) = 'CH2O' Inst%FINN_SPEC_NAME(26) = 'CH3Br' Inst%FINN_SPEC_NAME(27) = 'CH3CN' Inst%FINN_SPEC_NAME(28) = 'CH3I' Inst%FINN_SPEC_NAME(29) = 'DMS' - Inst%FINN_SPEC_NAME(30) = 'EOH' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(31) = 'ETBENZ' ! Currently lumped with TOLU - Inst%FINN_SPEC_NAME(32) = 'FUR' ! Currently not used + Inst%FINN_SPEC_NAME(30) = 'EOH' + Inst%FINN_SPEC_NAME(31) = 'ETBENZ' + Inst%FINN_SPEC_NAME(32) = 'FUR' Inst%FINN_SPEC_NAME(33) = 'GLYC' Inst%FINN_SPEC_NAME(34) = 'GLYX' Inst%FINN_SPEC_NAME(35) = 'HAC' Inst%FINN_SPEC_NAME(36) = 'HCN' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(37) = 'HCOOH' ! Not currently emitted by BB in GC + Inst%FINN_SPEC_NAME(37) = 'HCOOH' Inst%FINN_SPEC_NAME(38) = 'HNO2' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(39) = 'ISOP' ! Not currently emitted by BB in GC + Inst%FINN_SPEC_NAME(39) = 'ISOP' Inst%FINN_SPEC_NAME(40) = 'LIMO' - Inst%FINN_SPEC_NAME(41) = 'MACR' ! Not currently emitted in GC + Inst%FINN_SPEC_NAME(41) = 'MACR' Inst%FINN_SPEC_NAME(42) = 'MEK' Inst%FINN_SPEC_NAME(43) = 'MGLY' Inst%FINN_SPEC_NAME(44) = 'MNO3' - Inst%FINN_SPEC_NAME(45) = 'MOH' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(46) = 'MTPO' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(47) = 'MVK' ! Not currently emitted in GC + Inst%FINN_SPEC_NAME(45) = 'MOH' + Inst%FINN_SPEC_NAME(46) = 'MTPO' + Inst%FINN_SPEC_NAME(47) = 'MVK' Inst%FINN_SPEC_NAME(48) = 'PRPE' - Inst%FINN_SPEC_NAME(49) = 'R4N2' ! Not currently emitted in GC - Inst%FINN_SPEC_NAME(50) = 'RCHO' ! Not currently emitted by BB in GC - Inst%FINN_SPEC_NAME(51) = 'RCOOH' ! Currently not used - Inst%FINN_SPEC_NAME(52) = 'ROH' ! Currently not used + Inst%FINN_SPEC_NAME(49) = 'R4N2' + Inst%FINN_SPEC_NAME(50) = 'RCHO' + Inst%FINN_SPEC_NAME(51) = 'RCOOH' + Inst%FINN_SPEC_NAME(52) = 'ROH' Inst%FINN_SPEC_NAME(53) = 'SESQ' ! Currently not used - Inst%FINN_SPEC_NAME(54) = 'STYR' ! Currently lumped with TOLU - Inst%FINN_SPEC_NAME(55) = 'TMB' ! Currently lumped with XYLE + Inst%FINN_SPEC_NAME(54) = 'STYR' + Inst%FINN_SPEC_NAME(55) = 'TMB' Inst%FINN_SPEC_NAME(56) = 'TOLU' Inst%FINN_SPEC_NAME(57) = 'XYLE' Inst%FINN_SPEC_NAME(58) = 'H2' ! Currently not used @@ -973,6 +973,8 @@ SUBROUTINE HCOX_FINN_Init( HcoState, ExtName, ExtState, RC ) IF ( TRIM(SpcName) == 'MTPA' ) SpcName = 'APINE' IF ( TRIM(SpcName) == 'Hg0' ) SpcName = 'CO' IF ( TRIM(SpcName) == 'SOAP' ) SpcName = 'CO' + IF ( TRIM(SpcName) == 'EBZ' ) SpcName = 'ETBENZ' + IF ( TRIM(SpcName) == 'FURA' ) SpcName = 'FUR' ! For lumped species, we have to repeat the lookup multiple times, ! so use a while loop here. For example, for species TOLU this will @@ -1113,35 +1115,40 @@ SUBROUTINE HCOX_FINN_Init( HcoState, ExtName, ExtState, RC ) ! HEMCO species ID to multiple FINN species, so that all of ! them will be added to the same model species. - ! --> TMB is lumped into XYLE - IF ( Inst%SpcNames(Inst%nSpc) == 'XYLE' ) THEN - IF ( N_LUMPED == 0 ) THEN - SpcName = 'TMB' - Missing = .TRUE. - N_LUMPED = N_LUMPED + 1 - ENDIF - ENDIF - - ! --> ETBENZ and STYR are lumped into TOLU - IF ( Inst%SpcNames(Inst%nSpc) == 'TOLU' ) THEN + ! 
--> TMB is no longer lumped into XYLE + !IF ( Inst%SpcNames(Inst%nSpc) == 'XYLE' ) THEN + ! IF ( N_LUMPED == 0 ) THEN + ! SpcName = 'TMB' + ! Missing = .TRUE. + ! N_LUMPED = N_LUMPED + 1 + ! ENDIF + !ENDIF + + ! --> ETBENZ and STYR are no longer lumped into TOLU + !IF ( Inst%SpcNames(Inst%nSpc) == 'TOLU' ) THEN + ! IF ( N_LUMPED == 0 ) THEN + ! SpcName = 'ETBENZ' + ! Missing = .TRUE. + ! N_LUMPED = N_LUMPED + 1 + ! ELSEIF ( N_LUMPED == 1 ) THEN + ! SpcName = 'STYR' + ! Missing = .TRUE. + ! N_LUMPED = N_LUMPED + 1 + ! ENDIF + !ENDIF + + ! --> BPINE is lumped into MTPA + IF ( Inst%SpcNames(Inst%nSpc) == 'MTPA' ) THEN IF ( N_LUMPED == 0 ) THEN - SpcName = 'ETBENZ' - Missing = .TRUE. - N_LUMPED = N_LUMPED + 1 - ELSEIF ( N_LUMPED == 1 ) THEN - SpcName = 'STYR' + SpcName = 'BPINE' Missing = .TRUE. N_LUMPED = N_LUMPED + 1 ENDIF ENDIF - ! --> BPINE and CARENE are lumped into MTPA - IF ( Inst%SpcNames(Inst%nSpc) == 'MTPA' ) THEN + ! --> CARENE is lumped into MTPO + IF ( Inst%SpcNames(Inst%nSpc) == 'MTPO' ) THEN IF ( N_LUMPED == 0 ) THEN - SpcName = 'BPINE' - Missing = .TRUE. - N_LUMPED = N_LUMPED + 1 - ELSEIF ( N_LUMPED == 1 ) THEN SpcName = 'CARENE' Missing = .TRUE. N_LUMPED = N_LUMPED + 1 @@ -1435,7 +1442,7 @@ SUBROUTINE InstRemove ( Instance ) ENDIF Inst%FinnIDs => NULL() - IF ( ASSOCIATED( Inst%HcoIDs ) ) THEN + IF ( ASSOCIATED( Inst%HcoIDs ) ) THEN DEALLOCATE( Inst%HcoIDs ) ENDIF Inst%HcoIDs => NULL() diff --git a/src/Extensions/hcox_gfed_include_gfed4.H b/src/Extensions/hcox_gfed_include_gfed4.H index 148368e2..8e03c338 100644 --- a/src/Extensions/hcox_gfed_include_gfed4.H +++ b/src/Extensions/hcox_gfed_include_gfed4.H @@ -372,7 +372,7 @@ Inst%GFED4_EMFAC(36,5)=1.4E-03_hp Inst%GFED4_EMFAC(36,6)=1.7E-03_hp ! 1,3-butadiene -GFED4_SPEC_NAME(37)="BUTA" +GFED4_SPEC_NAME(37)="C4H6" Inst%GFED4_EMFAC(37,1)=9.5E-05_hp Inst%GFED4_EMFAC(37,2)=8.9E-05_hp Inst%GFED4_EMFAC(37,3)=1.25E-04_hp @@ -426,13 +426,13 @@ Inst%GFED4_EMFAC(40,5)=1.10E-03_hp !PET - Peatland Inst%GFED4_EMFAC(40,6)=1.03E-03_hp !AGW - Crop residue ! Acrolein -!GFED4_SPEC_NAME(36)="ACRO" -!Inst%GFED4_EMFAC(36,1)=4.8E-04_hp -!Inst%GFED4_EMFAC(36,2)=3.3E-04_hp -!Inst%GFED4_EMFAC(36,3)=3.4E-04_hp -!Inst%GFED4_EMFAC(36,4)=6.5E-04_hp -!Inst%GFED4_EMFAC(36,5)=2.7E-04_hp -!Inst%GFED4_EMFAC(36,6)=6.2E-04_hp +GFED4_SPEC_NAME(41)="ACR" +Inst%GFED4_EMFAC(41,1)=4.8E-04_hp +Inst%GFED4_EMFAC(41,2)=3.3E-04_hp +Inst%GFED4_EMFAC(41,3)=3.4E-04_hp +Inst%GFED4_EMFAC(41,4)=6.5E-04_hp +Inst%GFED4_EMFAC(41,5)=2.7E-04_hp +Inst%GFED4_EMFAC(41,6)=6.2E-04_hp ! butenenitriles !GFED4_SPEC_NAME(42)="BNIT" @@ -444,13 +444,13 @@ Inst%GFED4_EMFAC(40,6)=1.03E-03_hp !AGW - Crop residue !Inst%GFED4_EMFAC(42,6)=2.2E-04_hp !AGW - Crop residue ! Styrene -!GFED4_SPEC_NAME(43)="STYR" -!Inst%GFED4_EMFAC(43,1)=5.6E-05_hp !SAV - Savannah -!Inst%GFED4_EMFAC(43,2)=1.3E-04_hp !BORF - Boreal -!Inst%GFED4_EMFAC(43,3)=6.6E-05_hp !TEMP - Temperate -!Inst%GFED4_EMFAC(43,4)=2.8E-05_hp !DEFO - Tropical -!Inst%GFED4_EMFAC(43,5)=5.5E-05_hp !PET - Peatland -!Inst%GFED4_EMFAC(43,6)=4.3E-05_hp !AGW - Crop residue +GFED4_SPEC_NAME(43)="STYR" +Inst%GFED4_EMFAC(43,1)=5.6E-05_hp !SAV - Savannah +Inst%GFED4_EMFAC(43,2)=1.3E-04_hp !BORF - Boreal +Inst%GFED4_EMFAC(43,3)=6.6E-05_hp !TEMP - Temperate +Inst%GFED4_EMFAC(43,4)=2.8E-05_hp !DEFO - Tropical +Inst%GFED4_EMFAC(43,5)=5.5E-05_hp !PET - Peatland +Inst%GFED4_EMFAC(43,6)=4.3E-05_hp !AGW - Crop residue ! 
Phenol GFED4_SPEC_NAME(44)="PHEN" diff --git a/src/Extensions/hcox_soilnox_mod.F90 b/src/Extensions/hcox_soilnox_mod.F90 index 85cf0b78..871b8694 100644 --- a/src/Extensions/hcox_soilnox_mod.F90 +++ b/src/Extensions/hcox_soilnox_mod.F90 @@ -104,6 +104,7 @@ MODULE HCOX_SoilNOx_Mod INTEGER :: Instance INTEGER :: ExtNr ! Extension number INTEGER :: IDTNO ! NO tracer ID + LOGICAL :: UseSoilTemp ! Use soil temperature? LOGICAL :: LFERTILIZERNOX ! Use fertilizer NOx? REAL(hp) :: FERT_SCALE ! fertilizer scale factor @@ -298,20 +299,28 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) REAL(dp), ALLOCATABLE :: VecDp(:) LOGICAL :: FIRST LOGICAL :: aIR, FOUND - CHARACTER(LEN= 31) :: DiagnName - CHARACTER(LEN=255) :: MSG, DMY, LOC + CHARACTER(LEN= 31) :: DiagnName, Dummy + CHARACTER(LEN=255) :: ErrMsg, ThisLoc TYPE(MyInst), POINTER :: Inst !================================================================= ! HCOX_SoilNOx_RUN begins here! !================================================================= - LOC = 'HCOX_SoilNOx_RUN (HCOX_SOILNOX_MOD.F90)' + + ! Define strings for error messgaes + ErrMsg = '' + ThisLoc = & + 'HCOX_SoilNOx_Run (in module HEMCO/Extensions/hcox_soilnox_mod.F90)' + + ! Assume success + RC = HCO_SUCCESS ! Enter - CALL HCO_ENTER( HcoState%Config%Err, LOC, RC ) + CALL HCO_ENTER( HcoState%Config%Err, ThisLoc, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 0', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error entering soil NOx extension' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! Return if extension disabled @@ -323,8 +332,8 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) ! Get Instance CALL InstGet ( ExtState%SoilNox, Inst, RC ) IF ( RC /= HCO_SUCCESS ) THEN - WRITE(MSG,*) 'Cannot find soil NOx instance Nr. 
', ExtState%SoilNOx - CALL HCO_ERROR(MSG,RC) + WRITE(ErrMsg,*) 'Cannot find soil NOx instance number ', ExtState%SoilNOx + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) RETURN ENDIF @@ -339,138 +348,191 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) !IF ( FIRST ) THEN CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK1', Inst%LANDTYPE(1)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 1', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK1' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK2', Inst%LANDTYPE(2)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 2', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK2' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK3', Inst%LANDTYPE(3)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 3', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK3' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK4', Inst%LANDTYPE(4)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 4', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK4' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK5', Inst%LANDTYPE(5)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 5', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK5' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK6', Inst%LANDTYPE(6)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 6', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK6' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK7', Inst%LANDTYPE(7)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 7', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK7' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK8', Inst%LANDTYPE(8)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 8', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK8' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK9', Inst%LANDTYPE(9)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 9', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK9' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK10', Inst%LANDTYPE(10)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 10', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK10' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK11', Inst%LANDTYPE(11)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 11', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK11' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK12', Inst%LANDTYPE(12)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 12', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK12' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK13', 
Inst%LANDTYPE(13)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 13', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK13' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK14', Inst%LANDTYPE(14)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 14', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK14' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK15', Inst%LANDTYPE(15)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 15', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK15' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK16', Inst%LANDTYPE(16)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 16', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK16' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK17', Inst%LANDTYPE(17)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 17', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK17' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK18', Inst%LANDTYPE(18)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 18', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK18' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK19', Inst%LANDTYPE(19)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 19', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK19' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK20', Inst%LANDTYPE(20)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 20', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK20' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK21', Inst%LANDTYPE(21)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 21', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK21' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK22', Inst%LANDTYPE(22)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 22', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK22' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK23', Inst%LANDTYPE(23)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 23', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK23' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_LANDK24', Inst%LANDTYPE(24)%VAL, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 24', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_LANDK24' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_FERT', Inst%SOILFERT, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 25', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_FERT' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_ARID', Inst%CLIMARID, RC ) IF ( RC /= HCO_SUCCESS ) 
THEN - CALL HCO_ERROR( 'ERROR 26', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_ARID' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF + CALL HCO_EvalFld( HcoState, 'SOILNOX_NONARID', Inst%CLIMNARID, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 27', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error evaluating field SOILNOX_NONARID' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF IF ( FIRST ) THEN @@ -478,17 +540,19 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) ! read it from settings. IF ( .NOT. ASSOCIATED(ExtState%DRYCOEFF) ) THEN CALL GetExtOpt( HcoState%Config, Inst%ExtNr, 'DRYCOEFF', & - OptValChar=DMY, FOUND=FOUND, RC=RC ) + OptValChar=Dummy, FOUND=FOUND, RC=RC ) IF ( .NOT. FOUND ) THEN - CALL HCO_ERROR( 'DRYCOEFF not defined', RC ) + ErrMsg = 'DRYCOEFF not defined' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) RETURN ENDIF ALLOCATE(VecDp(MaxDryCoeff)) - CALL HCO_CharSplit( DMY, HCO_GetOpt(HcoState%Config%ExtList,'Separator'), & + CALL HCO_CharSplit( Dummy, HCO_GetOpt(HcoState%Config%ExtList,'Separator'), & HCO_GetOpt(HcoState%Config%ExtList,'Wildcard'), VecDp, N, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 28', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error reading DRYCOEFF' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ALLOCATE(Inst%DRYCOEFF(N)) Inst%DRYCOEFF(1:N) = VecDp(1:N) @@ -524,22 +588,24 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) CALL HCO_RestartGet( HcoState, 'DEP_RESERVOIR', & Inst%DEP_RESERVOIR, RC, Def2D=Def2D ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 29', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error getting field DEP_RESERVOIR' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! GWET_PREV [unitless] CALL HCO_RestartGet( HcoState, 'GWET_PREV', & Inst%GWET_PREV, RC, FILLED=FOUND ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 30', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error getting field GWET_PREV' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF IF ( .NOT. FOUND ) THEN Inst%GWET_PREV = 0.0_sp IF ( HcoState%amIRoot ) THEN - MSG = 'Cannot find GWET_PREV restart variable - initialized to 0.0!' - CALL HCO_WARNING(HcoState%Config%Err,MSG,RC) + ErrMsg = 'Cannot find GWET_PREV restart variable - initialized to 0.0!' + CALL HCO_WARNING( HcoState%Config%Err, ErrMsg, RC ) ENDIF ENDIF @@ -547,14 +613,15 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) CALL HCO_RestartGet( HcoState, 'PFACTOR', & Inst%PFACTOR, RC, FILLED=FOUND ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 31', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error getting field PFACTOR' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF IF ( .NOT. FOUND ) THEN Inst%PFACTOR = 1.0_sp IF ( HcoState%amIRoot ) THEN - MSG = 'Cannot find PFACTOR restart variable - initialized to 1.0!' - CALL HCO_WARNING(HcoState%Config%Err,MSG,RC) + ErrMsg = 'Cannot find PFACTOR restart variable - initialized to 1.0!' + CALL HCO_WARNING( HcoState%Config%Err, ErrMsg, RC ) ENDIF ENDIF @@ -562,14 +629,15 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) CALL HCO_RestartGet( HcoState, 'DRYPERIOD', & Inst%DRYPERIOD, RC, FILLED=FOUND ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 32', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error getting field DRYPERIOD' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF IF ( .NOT. FOUND ) THEN Inst%DRYPERIOD = 0.0_sp IF ( HcoState%amIRoot ) THEN - MSG = 'Cannot find DRYPERIOD restart variable - initialized to 0.0!' 
- CALL HCO_WARNING(HcoState%Config%Err,MSG,RC) + ErrMsg = 'Cannot find DRYPERIOD restart variable - initialized to 0.0!' + CALL HCO_WARNING( HcoState%Config%Err, ErrMsg, RC ) ENDIF ENDIF @@ -581,8 +649,9 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) ! Now a function of the new MODIS/Koppen biome map (J.D. Maasakkers) CALL GET_CANOPY_NOX( HcoState, ExtState, Inst, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 33', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error in GET_CANOPY_NOX' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! Init @@ -653,8 +722,9 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) ! Eventually apply spatiotemporal scale factors CALL HCOX_SCALE ( HcoState, FLUX_2D, TRIM(Inst%SpcScalFldNme(1)), RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 34', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error applying scale factors' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF !----------------------------------------------------------------- @@ -665,7 +735,8 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) CALL HCO_EmisAdd( HcoState, FLUX_2D, Inst%IDTNO, & RC, ExtNr=Inst%ExtNr ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'HCO_EmisAdd error', RC ) + ErrMsg = 'Error adding emissions fluxes' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) RETURN ENDIF @@ -676,29 +747,33 @@ SUBROUTINE HCOX_SoilNOx_Run( ExtState, HcoState, RC ) ! DEP_RESERVOIR [kg/m3] CALL HCO_RestartWrite( HcoState, 'DEP_RESERVOIR', Inst%DEP_RESERVOIR, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 35', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error writing field DEP_RESERVOIR to restart' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! GWET_PREV [unitless] CALL HCO_RestartWrite( HcoState, 'GWET_PREV', Inst%GWET_PREV, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 36', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error writing field GWET_PREV to restart' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! PFACTOR [unitless] CALL HCO_RestartWrite( HcoState, 'PFACTOR', Inst%PFACTOR, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 37', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error writing field PFACTOR to restart' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! DRYPERIOD [unitless] CALL HCO_RestartWrite( HcoState, 'DRYPERIOD', Inst%DRYPERIOD, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 38', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error writing field DRYPERIOD to restart' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! Leave w/ success @@ -748,7 +823,7 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) ! !LOCAL VARIABLES: ! INTEGER :: ExtNr - CHARACTER(LEN=255) :: MSG, LOC + CHARACTER(LEN=255) :: MSG, ErrMsg, ThisLoc CHARACTER(LEN=31), ALLOCATABLE :: SpcNames(:) INTEGER, ALLOCATABLE :: HcoIDs(:) INTEGER :: nSpc, I, J, II, AS @@ -757,24 +832,33 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) !================================================================= ! HCOX_SoilNOx_INIT begins here! !================================================================= - LOC = 'HCOX_SoilNOx_INIT (HCOX_SOILNOX_MOD.F90)' + + ! Define strings for error messages + ErrMsg = '' + ThisLoc = & + ' -> in HCOX_SoilNOx_Init (in module HEMCO/Extensions/hcox_soilnox_mod.F90)' + + ! Assume success + RC = HCO_SUCCESS ! Extension Nr. ExtNr = GetExtNr( HcoState%Config%ExtList, TRIM(ExtName) ) IF ( ExtNr <= 0 ) RETURN ! 
Enter - CALL HCO_ENTER( HcoState%Config%Err, LOC, RC ) + CALL HCO_ENTER( HcoState%Config%Err, ThisLoc, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 39', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error entering soil NOx extension' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! Create instance Inst => NULL() CALL InstCreate ( ExtNr, ExtState%SoilNox, Inst, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR ( 'Cannot create soil NOx instance', RC ) + ErrMsg = 'Cannot create soil NOx instance' + CALL HCO_ERROR ( ErrMsg, RC ) RETURN ENDIF @@ -785,11 +869,20 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) ! Read settings specified in configuration file ! Note: the specified strings have to match those in ! the config. file! + CALL GetExtOpt( HcoState%Config, ExtNr, 'UseSoilTemperature', & + OptValBool=Inst%UseSoilTemp, RC=RC ) + IF ( RC /= HCO_SUCCESS ) THEN + ErrMsg = 'Use soil temperature not specified' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN + ENDIF + CALL GetExtOpt( HcoState%Config, ExtNr, 'Use fertilizer NOx', & - OptValBool=Inst%LFERTILIZERNOX, RC=RC ) + OptValBool=Inst%LFERTILIZERNOX, RC=RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 40', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Use fertilizer NOx not specified' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! Get global scale factor @@ -798,12 +891,13 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) ! Get HEMCO species IDs CALL HCO_GetExtHcoID( HcoState, ExtNr, HcoIDs, SpcNames, nSpc, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 41', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'No soil NOx species specified' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF IF ( nSpc /= 1 ) THEN - MSG = 'Module soil NOx accepts only one species!' - CALL HCO_ERROR(MSG, RC ) + ErrMsg = 'Module soil NOx accepts only one species!' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) RETURN ENDIF Inst%IDTNO = HcoIDs(1) @@ -812,15 +906,17 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) CALL GetExtSpcVal( HcoState%Config, ExtNr, nSpc, & SpcNames, 'Scaling', 1.0_sp, Inst%SpcScalVal, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 42', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error reading species scale factor' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF CALL GetExtSpcVal( HcoState%Config, ExtNr, nSpc, & SpcNames, 'ScaleField', HCOX_NOSCALE, Inst%SpcScalFldNme, RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 43', RC, THISLOC=LOC ) - RETURN + ErrMsg = 'Error reading ScaleField' + CALL HCO_ERROR( ErrMsg, RC, ThisLoc ) + RETURN ENDIF ! 
Verbose mode @@ -841,6 +937,8 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) CALL HCO_MSG(HcoState%Config%Err,MSG) WRITE(MSG,*) ' - NOx scale field : ', TRIM(Inst%SpcScalFldNme(1)) CALL HCO_MSG(HcoState%Config%Err,MSG) + WRITE(MSG,*) ' - Use soil temperature : ', Inst%UseSoilTemp + CALL HCO_MSG(HcoState%Config%Err,MSG) WRITE(MSG,*) ' - Use fertilizer NOx : ', Inst%LFERTILIZERNOX CALL HCO_MSG(HcoState%Config%Err,MSG) WRITE(MSG,*) ' - Fertilizer scale factor: ', Inst%FERT_SCALE @@ -938,35 +1036,35 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) Trgt2D = Inst%FertNO_Diag, & RC = RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 44', RC, THISLOC=LOC ) + CALL HCO_ERROR( 'ERROR 44', RC, ThisLoc ) RETURN ENDIF CALL HCO_RestartDefine( HcoState, 'PFACTOR', & Inst%PFACTOR, '1', RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 45', RC, THISLOC=LOC ) + CALL HCO_ERROR( 'ERROR 45', RC, ThisLoc ) RETURN ENDIF CALL HCO_RestartDefine( HcoState, 'DRYPERIOD', & Inst%DRYPERIOD, '1', RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 46', RC, THISLOC=LOC ) + CALL HCO_ERROR( 'ERROR 46', RC, ThisLoc ) RETURN ENDIF CALL HCO_RestartDefine( HcoState, 'GWET_PREV', & Inst%GWET_PREV, '1', RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 47', RC, THISLOC=LOC ) + CALL HCO_ERROR( 'ERROR 47', RC, ThisLoc ) RETURN ENDIF CALL HCO_RestartDefine( HcoState, ' DEP_RESERVOIR', & Inst%DEP_RESERVOIR, 'kg/m3', RC ) IF ( RC /= HCO_SUCCESS ) THEN - CALL HCO_ERROR( 'ERROR 48', RC, THISLOC=LOC ) + CALL HCO_ERROR( 'ERROR 48', RC, ThisLoc ) RETURN ENDIF @@ -976,6 +1074,9 @@ SUBROUTINE HCOX_SoilNOx_Init( HcoState, ExtName, ExtState, RC ) ! Activate required met fields ExtState%T2M%DoUse = .TRUE. + IF (Inst%UseSoilTemp) THEN + ExtState%TSOIL1%DoUse = .TRUE. + ENDIF ExtState%GWETTOP%DoUse = .TRUE. ExtState%SUNCOS%DoUse = .TRUE. ExtState%U10M%DoUse = .TRUE. @@ -1146,7 +1247,7 @@ SUBROUTINE Soil_NOx_Emission( ExtState, Inst, TS_EMIS, I, & ! INTEGER :: K REAL(hp) :: BASE_TERM, CRF_TERM, PULSE - REAL(hp) :: TC, TEMP_TERM, WINDSQR + REAL(hp) :: TC, TSOIL, TEMP_TERM, WINDSQR REAL(hp) :: WET_TERM, A_FERT, A_BIOM REAL(hp) :: LAI, SUNCOS, GWET REAL(hp) :: ARID, NARID @@ -1162,6 +1263,11 @@ SUBROUTINE Soil_NOx_Emission( ExtState, Inst, TS_EMIS, I, & ! Surface temperature [C] TC = ExtState%T2M%Arr%Val(I,J) - 273.15_hp + ! Soil temperature [C] + IF ( Inst%UseSoilTemp ) THEN + TSOIL = ExtState%TSOIL1%Arr%Val(I,J) - 273.15_hp + ENDIF + ! Surface wind speed, squared WINDSQR = ExtState%U10M%Arr%Val(I,J)**2 + & ExtState%V10M%Arr%Val(I,J)**2 @@ -1188,7 +1294,11 @@ SUBROUTINE Soil_NOx_Emission( ExtState, Inst, TS_EMIS, I, & ! Temperature-dependent term of soil NOx emissions [unitless] ! Use GWET instead of climo wet/dry - TEMP_TERM = SOILTEMP( K, TC, GWET ) + IF ( Inst%UseSoilTemp ) THEN + TEMP_TERM = SOILTEMP( Inst, K , TSOIL, GWET ) + ELSE + TEMP_TERM = SOILTEMP( Inst, K, TC, GWET ) + ENDIF ! Soil moisture scaling of soil NOx emissions ARID = Inst%CLIMARID(I,J) @@ -1779,18 +1889,19 @@ END FUNCTION Source_WetN !\\ ! !INTERFACE: ! - FUNCTION SoilTemp( NN, TC, GWET ) RESULT( SOIL_TEMP ) + FUNCTION SoilTemp( Inst, NN, TC, GWET ) RESULT( SOIL_TEMP ) ! ! !INPUT PARAMETERS: ! - INTEGER, INTENT(IN) :: NN ! Soil biome type - REAL(hp), INTENT(IN) :: TC ! Surface air temperature [C] - REAL(hp), INTENT(IN) :: GWET ! Top soil moisture + TYPE(MyInst), POINTER :: Inst ! Instance object + INTEGER, INTENT(IN) :: NN ! Soil biome type + REAL(hp), INTENT(IN) :: TC ! 
Surface air temperature [C] + REAL(hp), INTENT(IN) :: GWET ! Top soil moisture ! ! !RETURN VALUE: ! - REAL(hp) :: SOIL_TEMP ! Temperature-dependent term of - ! soil NOx emissions [unitless] + REAL(hp) :: SOIL_TEMP ! Temperature-dependent term of + ! soil NOx emissions [unitless] ! ! !REMARKS: ! Based on Ormeci et al., [1999] and Otter et al., [1999] @@ -1818,6 +1929,10 @@ FUNCTION SoilTemp( NN, TC, GWET ) RESULT( SOIL_TEMP ) ! Geophys. Res., 105 , 20,69720,706, 1999. ! (3 ) Yienger, J.J, and H. Levy, Empirical model of global soil-biogenic ! NOx emissions, J. Geophys. Res., 100, D6, 11,447-11464, June 20, 1995. +! (4 ) Wang, Y., C. Ge, L. Castro Garcia, G.D. Jenerette, P.Y. Oikawa, and +! J. Wang, Improved modelling of soil NOx emissions in a high temperature +! agricultural region: role of background emissions on NO2 trend over the +! US, Environ. Res. Lett., 16(8), DOI: 10.1088/1748-9326/ac16a3, 2021. ! ! !REVISION HISTORY: ! 17 Aug 2009 - R. Yantosca - Initial Version @@ -1837,18 +1952,20 @@ FUNCTION SoilTemp( NN, TC, GWET ) RESULT( SOIL_TEMP ) ! Save surface air temp in shadow variable TMMP TMMP = TC - ! DRY - IF ( GWET < 0.3_hp ) THEN + IF ( .not. Inst%UseSoilTemp ) THEN + ! DRY + IF ( GWET < 0.3_hp ) THEN - ! Convert surface air temperature to model temperature - ! by adding 5 degrees C to model temperature - TMMP = TMMP + 5.0_hp + ! Convert surface air temperature to model temperature + ! by adding 5 degrees C to model temperature + TMMP = TMMP + 5.0_hp - ! WET - ELSE + ! WET + ELSE - TMMP = SOILTA(NN) * TMMP + SOILTB(NN) + TMMP = SOILTA(NN) * TMMP + SOILTB(NN) + ENDIF ENDIF !============================================================== @@ -1867,10 +1984,26 @@ FUNCTION SoilTemp( NN, TC, GWET ) RESULT( SOIL_TEMP ) ELSE - ! Caps temperature response at 30C - IF ( TMMP >= 30.0_hp ) TMMP = 30.0_hp + IF ( Inst%UseSoilTemp ) THEN - SOIL_TEMP = EXP( 0.103_hp * TMMP ) + !---------------------------------------------- + ! Soil NOx scheme from Yi Wang et al. (ERL, 2021) + !---------------------------------------------- + ! Caps temperature response at 40C + IF ( TMMP >= 40.e+0_hp ) TMMP = 40.e+0_hp + IF ( TMMP <= 20.e+0_hp ) SOIL_TEMP = EXP( 0.103 * TMMP ) + IF ( TMMP > 20.e+0_hp ) THEN + SOIL_TEMP = -0.009*(TMMP**3) + 0.837*(TMMP**2) + (-22.527)*TMMP + 196.149 + ENDIF + + ELSE + + ! Caps temperature response at 30C + IF ( TMMP >= 30.0_hp ) TMMP = 30.0_hp + + SOIL_TEMP = EXP( 0.103_hp * TMMP ) + + ENDIF ENDIF diff --git a/src/Extensions/hcox_state_mod.F90 b/src/Extensions/hcox_state_mod.F90 index ea6e5f02..832c3c66 100644 --- a/src/Extensions/hcox_state_mod.F90 +++ b/src/Extensions/hcox_state_mod.F90 @@ -131,6 +131,7 @@ MODULE HCOX_STATE_MOD TYPE(ExtDat_2R), POINTER :: ALBD ! Surface albedo [-] TYPE(ExtDat_2R), POINTER :: T2M ! 2m Sfce temperature [K] TYPE(ExtDat_2R), POINTER :: TSKIN ! Surface skin temperature [K] + TYPE(ExtDat_2R), POINTER :: TSOIL1 ! Soil temperature, layer 1 [K] TYPE(ExtDat_2R), POINTER :: GWETROOT ! Root soil wetness [1] TYPE(ExtDat_2R), POINTER :: GWETTOP ! Top soil moisture [-] TYPE(ExtDat_2R), POINTER :: SNOWHGT ! 
Snow height [mm H2O = kg H2O/m2] @@ -355,6 +356,12 @@ SUBROUTINE ExtStateInit( ExtState, RC ) RETURN ENDIF + CALL ExtDat_Init ( ExtState%TSOIL1, RC ) + IF ( RC /= HCO_SUCCESS ) THEN + CALL HCO_ERROR( 'Initializing TSOIL1', RC, THISLOC=LOC ) + RETURN + ENDIF + CALL ExtDat_Init ( ExtState%GWETROOT, RC ) IF ( RC /= HCO_SUCCESS ) THEN CALL HCO_ERROR( 'ERROR 6', RC, THISLOC=LOC ) @@ -676,6 +683,7 @@ SUBROUTINE ExtStateFinal( ExtState ) CALL ExtDat_Cleanup( ExtState%ALBD ) CALL ExtDat_Cleanup( ExtState%T2M ) CALL ExtDat_Cleanup( ExtState%TSKIN ) + CALL ExtDat_Cleanup( ExtState%TSOIL1 ) CALL ExtDat_Cleanup( ExtState%GWETROOT ) CALL ExtDat_Cleanup( ExtState%GWETTOP ) CALL ExtDat_Cleanup( ExtState%SNOWHGT ) diff --git a/src/Interfaces/Standalone/hcoi_standalone_mod.F90 b/src/Interfaces/Standalone/hcoi_standalone_mod.F90 index 1d5085fb..d653bd17 100644 --- a/src/Interfaces/Standalone/hcoi_standalone_mod.F90 +++ b/src/Interfaces/Standalone/hcoi_standalone_mod.F90 @@ -2210,7 +2210,7 @@ SUBROUTINE ExtState_SetFields ( HcoState, ExtState, RC ) ENDIF ENDIF - !%%%%% Air and skin temperature %%%%% + !%%%%% Air temperature %%%%% IF ( ExtState%T2M%DoUse ) THEN Name = 'T2M' CALL ExtDat_Set( HcoState, ExtState%T2M, & @@ -2224,6 +2224,7 @@ SUBROUTINE ExtState_SetFields ( HcoState, ExtState, RC ) ENDIF ENDIF + !%%%%% Skin temperature %%%%% IF ( ExtState%TSKIN%DoUse ) THEN Name = 'TS' CALL ExtDat_Set( HcoState, ExtState%TSKIN, & @@ -2237,6 +2238,20 @@ SUBROUTINE ExtState_SetFields ( HcoState, ExtState, RC ) ENDIF ENDIF + !%%%%% Soil temperature %%%%% + IF ( ExtState%TSOIL1%DoUse ) THEN + Name = 'TSOIL1' + CALL ExtDat_Set( HcoState, ExtState%TSOIL1, & + TRIM( Name ), RC, FIRST=FIRST ) + IF ( RC /= HCO_SUCCESS ) THEN + ErrMsg = 'Could not find quantity "' // TRIM( Name ) // & + '" for the HEMCO standalone simulation!' + CALL HCO_Error( ErrMsg, RC, ThisLoc ) + CALL HCO_Leave( HcoState%Config%Err, RC ) + RETURN + ENDIF + ENDIF + !%%%%% Soil moisture %%%%% IF ( ExtState%GWETROOT%DoUse ) THEN Name = 'GWETROOT' @@ -3057,7 +3072,9 @@ SUBROUTINE Init_Dry_Run( IsDryRun, RC ) CALL Print_Dry_Run_Warning( 6 ) ! Print dry-run header to the HEMCO log file - CALL Print_Dry_Run_Warning( HcoState%Config%Err%LUN ) + IF ( HcoState%Config%Err%LUN > 0 ) THEN + CALL Print_Dry_Run_Warning( HcoState%Config%Err%LUN ) + ENDIF ELSE @@ -3132,7 +3149,9 @@ SUBROUTINE Cleanup_Dry_Run( RC ) CALL Print_Dry_Run_Warning( 6 ) ! Print dry-run header to the HEMCO log file - CALL Print_Dry_Run_Warning( HcoState%Config%Err%LUN ) + IF ( HcoState%Config%Err%LUN > 0 ) THEN + CALL Print_Dry_Run_Warning( HcoState%Config%Err%LUN ) + ENDIF ENDIF
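Illustrative sketch (not part of the patch above): the following minimal standalone Fortran program tabulates the two temperature-response curves that the updated SoilTemp function switches between, using the coefficients that appear in the diff. The program name, the local hp kind parameter, the kind suffixes on the literals, and the omission of the wet/dry (GWET) adjustment and of the SOILTA/SOILTB biome coefficients are simplifications made for this example only; the authoritative implementation is SoilTemp in src/Extensions/hcox_soilnox_mod.F90.

PROGRAM SoilTempSketch
  !
  ! Sketch only: compares the default 2-m air-temperature response
  ! (exponential, capped at 30 C) with the Wang et al. (2021)
  ! soil-temperature response (exponential up to 20 C, cubic
  ! polynomial between 20 C and a 40 C cap), as coded in the diff.
  !
  IMPLICIT NONE
  INTEGER, PARAMETER :: hp = KIND( 1.0d0 )   ! stand-in for HEMCO's hp kind
  INTEGER            :: I
  REAL(hp)           :: T

  WRITE( 6, '(a)' ) '    T [C]    air-temp curve    soil-temp curve (Wang 2021)'
  DO I = 0, 45, 5
     T = REAL( I, hp )
     WRITE( 6, '(f9.1,2f20.4)' ) T, AirTempTerm( T ), SoilTempTerm( T )
  ENDDO

CONTAINS

  ! Default formulation: exponential response, capped at 30 C
  ! (wet/dry and biome adjustments of the input temperature omitted here)
  FUNCTION AirTempTerm( TC ) RESULT( F )
    REAL(hp), INTENT(IN) :: TC
    REAL(hp)             :: F, T
    T = MIN( TC, 30.0_hp )
    F = EXP( 0.103_hp * T )
  END FUNCTION AirTempTerm

  ! Wang et al. (2021) formulation: exponential response of the layer-1
  ! soil temperature up to 20 C, cubic polynomial from 20 C to a 40 C cap
  FUNCTION SoilTempTerm( TSOIL ) RESULT( F )
    REAL(hp), INTENT(IN) :: TSOIL
    REAL(hp)             :: F, T
    T = MIN( TSOIL, 40.0_hp )
    IF ( T <= 20.0_hp ) THEN
       F = EXP( 0.103_hp * T )
    ELSE
       F = -0.009_hp*T**3 + 0.837_hp*T**2 - 22.527_hp*T + 196.149_hp
    ENDIF
  END FUNCTION SoilTempTerm

END PROGRAM SoilTempSketch

As the diff shows, the soil-temperature pathway is selected at run time through the extension option read as 'UseSoilTemperature' in HCOX_SoilNOx_Init; enabling it activates the TSOIL1 (layer-1 soil temperature) field in ExtState and, for the standalone model, the TSOIL1 quantity in ExtState_SetFields.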