diff --git a/docs/note_fixfield.txt b/docs/note_fixfield.txt index 3b22de5e13..af2539e48a 100644 --- a/docs/note_fixfield.txt +++ b/docs/note_fixfield.txt @@ -4,6 +4,8 @@ They are saved locally on all platforms Hera: /scratch1/NCEPDEV/global/glopara/fix Orion: /work/noaa/global/glopara/fix +Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix +S4: /data/prod/glopara/fix ------------------------------------------------------------------------------ 09/28/2018 diff --git a/docs/source/components.rst b/docs/source/components.rst index 6b947b3432..3ebd575a82 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -60,6 +60,7 @@ Observation data, also known as dump data, is prepared in production and then ar * Hera: /scratch1/NCEPDEV/global/glopara/dump * Orion: /work/noaa/rstprod/dump +* Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump * WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump * S4: /data/prod/glopara/dump diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst index 7161e2b742..da54f29521 100644 --- a/docs/source/hpc.rst +++ b/docs/source/hpc.rst @@ -22,6 +22,7 @@ HPC helpdesks * HPSS: rdhpcs.hpss.help@noaa.gov * Gaea: oar.gfdl.help@noaa.gov * S4: david.huber@noaa.gov +* Jet: rdhpcs.jet.help@noaa.gov ====================== Restricted data access @@ -76,6 +77,8 @@ It is advised to use Git v2+ when available. 
At the time of writing this documen +---------+----------+---------------------------------------+ | Orion | v1.8.3.1 | **module load git/2.28.0** | +---------+----------+---------------------------------------+ +| Jet | v2.18.0 | default | ++---------+----------+---------------------------------------+ | WCOSS2 | v2.26.2 | default or **module load git/2.29.0** | +---------+----------+---------------------------------------+ | S4 | v1.8.3.1 | **module load git/2.30.0** | @@ -96,9 +99,9 @@ For the manage_externals utility functioning:: Error: fatal: ssh variant 'simple' does not support setting port Fix: git config --global ssh.variant ssh -=================================== -Stacksize on R&Ds (Hera, Orion, S4) -=================================== +======================================== +Stacksize on R&Ds (Hera, Orion, Jet, S4) +======================================== Some GFS components, like the UPP, need an unlimited stacksize. Add the following setting into your appropriate .*rc file to support these components: diff --git a/docs/source/init.rst b/docs/source/init.rst index b7a86f5b62..b065af2373 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -190,6 +190,7 @@ Forecast-only P8 prototype initial conditions are made available to users on sup WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/IC/COUPLED HERA: /scratch1/NCEPDEV/climate/role.ufscpara/IC ORION: /work/noaa/global/glopara/data/ICSDIR/prototype_ICs + JET: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs S4: /data/prod/glopara/coupled_ICs These locations are known within the workflow via paths set in ``parm/config/config.coupled_ic``. @@ -217,7 +218,7 @@ Not yet supported. See :ref:`Manual Generation` section below --------------------- Forecast-only coupled --------------------- -Coupled initial conditions are currently only generated offline and copied prior to the forecast run. 
Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). +Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). .. _forecastonly-atmonly: diff --git a/docs/source/setup.rst b/docs/source/setup.rst index a4e70fbfcb..eb13b4b6f3 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -28,6 +28,13 @@ Experiment Setup module load miniconda/3.8-s4 + * - Jet + - :: + + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles + module load miniconda3/4.12.0 + conda activate ufswm + If running with Rocoto make sure to have a Rocoto module loaded before running setup scripts: .. 
list-table:: ROCOTO Module Load Commands @@ -54,6 +61,10 @@ If running with Rocoto make sure to have a Rocoto module loaded before running s - :: module load rocoto/1.3.4 + * - Jet + - :: + + module load rocoto/1.3.3 ^^^^^^^^^^^^^^^^^^^^^^^^ Forecast-only experiment diff --git a/env/JET.env b/env/JET.env index 66d9ed9a3b..73c5031a68 100755 --- a/env/JET.env +++ b/env/JET.env @@ -18,11 +18,13 @@ if [[ "${PARTITION_BATCH}" = "xjet" ]]; then export npe_node_max=24 elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then export npe_node_max=16 +elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 fi -export launcher="srun -l --export=ALL" +export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL" export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" -# Configure STACK +# Configure MPI environment export OMP_STACKSIZE=2048000 export NTHSTACK=1024000000 @@ -34,46 +36,85 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then nth_max=$((npe_node_max / npe_node_prep)) export POE="NO" - export BACK=${BACK:-"NO"} + export BACK="NO" export sys_tp="JET" + export launcher_PREP="srun" elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + export CFP_MP="YES" if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} elif [[ "${step}" = "atmanalrun" ]]; then + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmanalrun)) export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} ${npe_atmanalrun}" + export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" elif [[ "${step}" = "atmensanalrun" ]]; then 
+ export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmensanalrun)) export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} ${npe_atmensanalrun}" + export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" elif [[ "${step}" = "aeroanlrun" ]]; then + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_aeroanlrun)) export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} - export APRUN_AEROANL="${launcher} ${npe_aeroanlrun}" + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalbmat)) + + export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + +elif [[ "${step}" = "ocnanalrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalrun)) + + export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" -elif [[ "${step}" = "anal" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" nth_max=$((npe_node_max / npe_node_anal)) - export NTHREADS_GSI=${nth_gsi:-${nth_max}} + export NTHREADS_GSI=${nth_anal:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && 
export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} ${npe_gsi:-${npe_anal:-${PBS_NP}}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} @@ -82,11 +123,11 @@ elif [[ "${step}" = "anal" ]]; then export NTHREADS_CYCLE=${nth_cycle:-12} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_cycle=${ntiles:-6} - export APRUN_CYCLE="${launcher} ${npe_cycle}" + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" export NTHREADS_GAUSFCANL=1 npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="${launcher} ${npe_gausfcanl}" + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" elif [[ "${step}" = "sfcanl" ]]; then nth_max=$((npe_node_max / npe_node_sfcanl)) @@ -94,35 +135,38 @@ elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${nth_sfcanl:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_sfcanl=${ntiles:-6} - export APRUN_CYCLE="${launcher} ${npe_sfcanl}" + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" elif [[ "${step}" = "gldas" ]]; then - nth_max=$((npe_node_max / npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} - [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} - export APRUN_GLDAS="${launcher} ${npe_gldas}" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} - export APRUN_GAUSSIAN="${launcher} ${npe_gaussian}" + echo "WARNING: ${step} is not enabled on ${machine}!" 
elif [[ "${step}" = "eobs" ]]; then + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + nth_max=$((npe_node_max / npe_node_eobs)) - export NTHREADS_GSI=${nth_gsi:-${nth_max}} + export NTHREADS_GSI=${nth_eobs:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} ${npe_gsi:-${npe_eobs:-${PBS_NP}}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" elif [[ "${step}" = "eupd" ]]; then nth_max=$((npe_node_max / npe_node_eupd)) - export NTHREADS_ENKF=${nth_enkf:-${nth_max}} + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} - export APRUN_ENKF="${launcher} ${npe_enkf:-${npe_eupd:-${PBS_NP}}}" + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then @@ -145,11 +189,11 @@ elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${nth_np:-1} [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher}" + export APRUN_NP="${launcher} -n ${npe_post}" export NTHREADS_DWN=${nth_dwn:-1} [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_NP="${launcher}" + export APRUN_DWN="${launcher} -n ${npe_dwn}" elif [[ "${step}" = "ecen" ]]; then @@ -157,7 +201,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${nth_ecen:-${nth_max}} [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} - export APRUN_ECEN="${launcher} ${npe_ecen:-${PBS_NP}}" + export APRUN_ECEN="${launcher} -n ${npe_ecen}" export NTHREADS_CHGRES=${nth_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} @@ -165,7 +209,7 @@ elif 
[[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} ${npe_ecen:-${PBS_NP}}" + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" elif [[ "${step}" = "esfc" ]]; then @@ -173,12 +217,11 @@ elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${nth_esfc:-${nth_max}} [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} - export APRUN_ESFC="${launcher} ${npe_esfc:-${PBS_NP}}" + export APRUN_ESFC="${launcher} -n ${npe_esfc}" - export NTHREADS_CYCLE=${nth_cycle:-12} + export NTHREADS_CYCLE=${nth_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} - export APRUN_CYCLE="${launcher} ${npe_esfc}" - + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" elif [[ "${step}" = "epos" ]]; then @@ -186,23 +229,25 @@ elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${nth_epos:-${nth_max}} [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} - export APRUN_EPOS="${launcher} ${npe_epos:-${PBS_NP}}" + export APRUN_EPOS="${launcher} -n ${npe_epos}" elif [[ "${step}" = "init" ]]; then - export APRUN="${launcher}" + export APRUN="${launcher} -n ${npe_init}" elif [[ "${step}" = "postsnd" ]]; then + export CFP_MP="YES" + nth_max=$((npe_node_max / npe_node_postsnd)) export NTHREADS_POSTSND=${nth_postsnd:-1} [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} - export APRUN_POSTSND="${launcher} ${npe_postsnd}" + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} - export APRUN_POSTSNDCFP="${launcher} ${npe_postsndcfp}" + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" elif [[ "${step}" = "awips" ]]; then diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh index 4230b8a62b..9cbbb746b0 
100755 --- a/jobs/rocoto/vrfy.sh +++ b/jobs/rocoto/vrfy.sh @@ -17,12 +17,14 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" ############################################################### export COMPONENT="atmos" -export CDATEm1=$(${NDATE} -24 ${CDATE}) -export PDYm1=$(echo ${CDATEm1} | cut -c1-8) +CDATEm1=$(${NDATE} -24 "${CDATE}") +export CDATEm1 +PDYm1=$(echo "${CDATEm1}" | cut -c1-8) +export PDYm1 -CDATEm1c=$(${NDATE} -06 ${CDATE}) -PDYm1c=$(echo ${CDATEm1c} | cut -c1-8) -pcyc=$(echo ${CDATEm1c} | cut -c9-10) +CDATEm1c=$(${NDATE} -06 "${CDATE}") +PDYm1c=$(echo "${CDATEm1c}" | cut -c1-8) +pcyc=$(echo "${CDATEm1c}" | cut -c9-10) export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" @@ -30,20 +32,21 @@ export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" ############################################################### echo echo "=============== START TO GENERATE QUARTER DEGREE GRIB1 FILES ===============" -if [ ${MKPGB4PRCP} = "YES" -a ${CDUMP} = "gfs" ]; then - if [ ! -d ${ARCDIR} ]; then mkdir -p ${ARCDIR} ; fi +if [[ ${MKPGB4PRCP} = "YES" && ${CDUMP} = "gfs" ]]; then + if [[ ! 
-d "${ARCDIR}" ]]; then mkdir -p "${ARCDIR}" ; fi nthreads_env=${OMP_NUM_THREADS:-1} # get threads set in env export OMP_NUM_THREADS=1 - cd ${COMIN} + set -e + cd "${COMIN}" fhmax=${vhr_rain:-${FHMAX_GFS}} fhr=0 - while [ ${fhr} -le ${fhmax} ]; do - fhr2=$(printf %02i ${fhr}) - fhr3=$(printf %03i ${fhr}) + while [[ ${fhr} -le ${fhmax} ]]; do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") fname=${CDUMP}.t${cyc}z.sfluxgrbf${fhr3}.grib2 fileout=${ARCDIR}/pgbq${fhr2}.${CDUMP}.${CDATE}.grib2 - ${WGRIB2} ${fname} -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib ${fileout} - (( fhr = ${fhr} + 6 )) + ${WGRIB2} "${fname}" -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib "${fileout}" + (( fhr = fhr + 6 )) done export OMP_NUM_THREADS=${nthreads_env} # revert to threads set in env fi @@ -60,7 +63,7 @@ fi ############################################################### echo echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" -if [ ${VRFYRAD} = "YES" -a "${CDUMP}" = "${CDFNL}" -a "${CDATE}" != "${SDATE}" ]; then +if [[ ${VRFYRAD} = "YES" && ${CDUMP} = "${CDFNL}" && ${CDATE} != "${SDATE}" ]]; then export EXP=${PSLOT} export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" @@ -76,7 +79,7 @@ fi ############################################################### echo echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [ "${VRFYOZN}" = "YES" -a "${CDUMP}" = "${CDFNL}" -a "${CDATE}" != "${SDATE}" ]; then +if [[ ${VRFYOZN} = "YES" && ${CDUMP} = "${CDFNL}" && ${CDATE} != "${SDATE}" ]]; then export EXP=${PSLOT} export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" @@ -92,7 +95,7 @@ fi ############################################################### echo echo "=============== START TO RUN MINMON ===============" -if [ "${VRFYMINMON}" = "YES" -a "${CDATE}" != "${SDATE}" ]; then +if [[ ${VRFYMINMON} = "YES" && ${CDATE} != "${SDATE}" ]]; then export COMOUT="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" 
export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${CDUMP}.${PDY}/${cyc}" @@ -107,9 +110,10 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" -if [ ${VRFYTRAK} = "YES" ]; then +if [[ ${VRFYTRAK} = "YES" ]]; then - export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} + export COMINsyn ${TRACKERSH} fi @@ -118,7 +122,7 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION ===============" -if [ ${VRFYGENESIS} = "YES" -a "${CDUMP}" = "gfs" ]; then +if [[ ${VRFYGENESIS} = "YES" && "${CDUMP}" = "gfs" ]]; then ${GENESISSH} fi @@ -126,15 +130,15 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION (FSU) ===============" -if [ ${VRFYFSU} = "YES" -a "${CDUMP}" = "gfs" ]; then +if [[ ${VRFYFSU} = "YES" && "${CDUMP}" = "gfs" ]]; then ${GENESISFSU} fi ############################################################### # Force Exit out cleanly -cd ${DATAROOT} -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATA} ; fi +cd "${DATAROOT}" +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi exit 0 diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc index 201daa7b8d..e5322cbb2c 100644 --- a/modulefiles/module-setup.sh.inc +++ b/modulefiles/module-setup.sh.inc @@ -22,7 +22,7 @@ if [[ -d /lfs/f1 ]] ; then source /usr/share/lmod/lmod/init/$__ms_shell fi module reset -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet if ( ! 
eval module help > /dev/null 2>&1 ) ; then source /apps/lmod/lmod/init/$__ms_shell diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index 93ad98eb70..b772ea5263 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -2,15 +2,20 @@ help([[ Load environment to run GFS on Jet ]]) -prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/nwprod/hpc-stack/libs/modulefiles/stack") +prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/role.epic/hpc-stack/libs/intel-18.0.5.274/modulefiles/stack") -load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc", "1.2.0")) load(pathJoin("hpc-intel", "18.0.5.274")) load(pathJoin("hpc-impi", "2018.4.274")) +load(pathJoin("cmake", "3.20.1")) load("hpss") load(pathJoin("gempak", "7.4.2")) +load(pathJoin("ncl", "6.6.2")) +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("libpng", "1.6.35")) load(pathJoin("cdo", "1.9.5")) +load(pathJoin("R", "4.0.2")) load(pathJoin("hdf5", "1.10.6")) load(pathJoin("netcdf", "4.7.4")) @@ -19,7 +24,15 @@ load(pathJoin("nco", "4.9.1")) load(pathJoin("prod_util", "1.2.2")) load(pathJoin("grib_util", "1.2.2")) load(pathJoin("g2tmpl", "1.10.0")) +load(pathJoin("ncdiag", "1.0.0")) load(pathJoin("crtm", "2.4.0")) load(pathJoin("wgrib2", "2.0.8")) +setenv("WGRIB2","wgrib2") + +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v1.0.1/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + +prepend_path("MODULEPATH", "/contrib/anaconda/modulefiles") +load(pathJoin("anaconda", "5.3.1")) whatis("Description: GFS run environment") diff --git a/parm/config/config.aero b/parm/config/config.aero index 171701cd2a..1cb3bf5679 100644 --- a/parm/config/config.aero +++ b/parm/config/config.aero @@ -19,6 +19,9 @@ case $machine in "WCOSS2") AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; *) echo "FATAL ERROR: Machine $machine unsupported for aerosols" 
exit 2 diff --git a/parm/config/config.coupled_ic b/parm/config/config.coupled_ic index 1977c56ca4..50fab283b5 100644 --- a/parm/config/config.coupled_ic +++ b/parm/config/config.coupled_ic @@ -15,6 +15,8 @@ elif [[ "${machine}" == "ORION" ]]; then export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" elif [[ "${machine}" == "S4" ]]; then export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" fi diff --git a/parm/config/config.metp b/parm/config/config.metp index 4be7151ffa..c90903f6a5 100644 --- a/parm/config/config.metp +++ b/parm/config/config.metp @@ -6,7 +6,7 @@ echo "BEGIN: config.metp" # Get task specific resources -. $EXPDIR/config.resources metp +. "${EXPDIR}/config.resources" metp export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus @@ -18,15 +18,15 @@ export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus #---------------------------------------------------------- ## EMC_VERIF_GLOBAL SETTINGS export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd -export VERIF_GLOBALSH=$HOMEverif_global/ush/run_verif_global_in_global_workflow.sh +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh ## INPUT DATA SETTINGS -export model=$PSLOT +export model=${PSLOT} export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export model_hpss_dir=$ATARDIR/.. +export model_hpss_dir=${ATARDIR}/.. export get_data_from_hpss="NO" export hpss_walltime="10" ## OUTPUT SETTINGS -export model_stat_dir=$ARCDIR/.. +export model_stat_dir=${ARCDIR}/.. 
export make_met_data_by="VALID" export SENDMETVIEWER="NO" ## DATE SETTINGS @@ -39,20 +39,20 @@ export log_MET_output_to_METplus="yes" export g2g1_type_list="anom pres sfc" export g2g1_anom_truth_name="self_anl" export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_anom_fhr_min=$FHMIN_GFS -export g2g1_anom_fhr_max=$FHMAX_GFS +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} export g2g1_anom_grid="G002" export g2g1_anom_gather_by="VSDB" export g2g1_pres_truth_name="self_anl" export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_pres_fhr_min=$FHMIN_GFS -export g2g1_pres_fhr_max=$FHMAX_GFS +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} export g2g1_pres_grid="G002" export g2g1_pres_gather_by="VSDB" export g2g1_sfc_truth_name="self_f00" export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_sfc_fhr_min=$FHMIN_GFS -export g2g1_sfc_fhr_max=$FHMAX_GFS +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} export g2g1_sfc_grid="G002" export g2g1_sfc_gather_by="VSDB" export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" @@ -62,19 +62,19 @@ export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow expe export g2o1_type_list="upper_air conus_sfc" export g2o1_upper_air_msg_type_list="ADPUPA" export g2o1_upper_air_vhr_list="00 06 12 18" -export g2o1_upper_air_fhr_min=$FHMIN_GFS +export g2o1_upper_air_fhr_min=${FHMIN_GFS} export g2o1_upper_air_fhr_max="240" export g2o1_upper_air_grid="G003" export g2o1_upper_air_gather_by="VSDB" export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_conus_sfc_fhr_min=$FHMIN_GFS +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} export g2o1_conus_sfc_fhr_max="240" export g2o1_conus_sfc_grid="G104" export g2o1_conus_sfc_gather_by="VSDB" export 
g2o1_polar_sfc_msg_type_list="IABP" export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_polar_sfc_fhr_min=$FHMIN_GFS +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} export g2o1_polar_sfc_fhr_max="240" export g2o1_polar_sfc_grid="G219" export g2o1_polar_sfc_gather_by="VSDB" @@ -87,7 +87,7 @@ export precip1_type_list="ccpa_accum24hr" export precip1_ccpa_accum24hr_model_bucket="06" export precip1_ccpa_accum24hr_model_var="APCP" export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export precip1_ccpa_accum24hr_fhr_min=$FHMIN_GFS +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} export precip1_ccpa_accum24hr_fhr_max="180" export precip1_ccpa_accum24hr_grid="G211" export precip1_ccpa_accum24hr_gather_by="VSDB" diff --git a/parm/config/config.resources b/parm/config/config.resources index f211dbc93b..31b871da61 100644 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -4,7 +4,7 @@ # Set resource information for job tasks # e.g. walltime, node, cores per node, memory etc. -if [ $# -ne 1 ]; then +if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" @@ -49,7 +49,7 @@ elif [[ ${machine} = "ORION" ]]; then export npe_node_max=40 fi -if [ ${step} = "prep" ]; then +if [[ ${step} = "prep" ]]; then export wtime_prep='00:30:00' export npe_prep=4 export npe_node_prep=2 @@ -69,91 +69,101 @@ elif [[ "${step}" = "aerosol_init" ]]; then export NTASKS=${npe_aerosol_init} export memory_aerosol_init="6G" -elif [ ${step} = "waveinit" ]; then +elif [[ ${step} = "waveinit" ]]; then export wtime_waveinit="00:10:00" export npe_waveinit=12 export nth_waveinit=1 - export npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit export NTASKS=${npe_waveinit} export memory_waveinit="2GB" -elif [ ${step} = "waveprep" ]; then +elif [[ ${step} = "waveprep" ]]; then export wtime_waveprep="00:10:00" export npe_waveprep=5 export npe_waveprep_gfs=65 export nth_waveprep=1 export nth_waveprep_gfs=1 - export npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) - export npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs export NTASKS=${npe_waveprep} export NTASKS_gfs=${npe_waveprep_gfs} export memory_waveprep="100GB" export memory_waveprep_gfs="150GB" -elif [ ${step} = "wavepostsbs" ]; then +elif [[ ${step} = "wavepostsbs" ]]; then export wtime_wavepostsbs="00:20:00" export wtime_wavepostsbs_gfs="03:00:00" export npe_wavepostsbs=8 export nth_wavepostsbs=1 - export npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs export NTASKS=${npe_wavepostsbs} export memory_wavepostsbs="10GB" export memory_wavepostsbs_gfs="10GB" -elif [ ${step} 
= "wavepostbndpnt" ]; then +elif [[ ${step} = "wavepostbndpnt" ]]; then export wtime_wavepostbndpnt="01:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 - export npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt export NTASKS=${npe_wavepostbndpnt} export is_exclusive=True -elif [ ${step} = "wavepostbndpntbll" ]; then +elif [[ ${step} = "wavepostbndpntbll" ]]; then export wtime_wavepostbndpntbll="01:00:00" export npe_wavepostbndpntbll=448 export nth_wavepostbndpntbll=1 - export npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll export NTASKS=${npe_wavepostbndpntbll} export is_exclusive=True -elif [ ${step} = "wavepostpnt" ]; then +elif [[ ${step} = "wavepostpnt" ]]; then export wtime_wavepostpnt="01:30:00" export npe_wavepostpnt=200 export nth_wavepostpnt=1 - export npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt export NTASKS=${npe_wavepostpnt} export is_exclusive=True -elif [ ${step} = "wavegempak" ]; then +elif [[ ${step} = "wavegempak" ]]; then export wtime_wavegempak="02:00:00" export npe_wavegempak=1 export nth_wavegempak=1 - export npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak export NTASKS=${npe_wavegempak} export memory_wavegempak="1GB" -elif [ ${step} = "waveawipsbulls" ]; then +elif [[ ${step} = "waveawipsbulls" ]]; then export wtime_waveawipsbulls="00:20:00" export npe_waveawipsbulls=1 export nth_waveawipsbulls=1 - export npe_node_waveawipsbulls=$(echo "${npe_node_max} / 
${nth_waveawipsbulls}" | bc) + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls export NTASKS=${npe_waveawipsbulls} export is_exclusive=True -elif [ ${step} = "waveawipsgridded" ]; then +elif [[ ${step} = "waveawipsgridded" ]]; then export wtime_waveawipsgridded="02:00:00" export npe_waveawipsgridded=1 export nth_waveawipsgridded=1 - export npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded export NTASKS=${npe_waveawipsgridded} export memory_waveawipsgridded_gfs="1GB" @@ -325,7 +335,7 @@ elif [[ "${step}" = "ocnanalpost" ]]; then npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) export npe_node_ocnanalpost -elif [ ${step} = "anal" ]; then +elif [[ ${step} = "anal" ]]; then export wtime_anal="00:50:00" export wtime_anal_gfs="00:40:00" @@ -337,13 +347,12 @@ elif [ ${step} = "anal" ]; then export nth_anal=8 export nth_anal_gfs=8 fi - if [ ${CASE} = "C384" ]; then + if [[ ${CASE} = "C384" ]]; then export npe_anal=160 export npe_anal_gfs=160 export nth_anal=10 export nth_anal_gfs=10 if [[ ${machine} = "S4" ]]; then - #For the analysis jobs, the number of tasks and cores used must be equal #On the S4-s4 partition, this is accomplished by increasing the task #count to a multiple of 32 if [[ ${PARTITION_BATCH} = "s4" ]]; then @@ -372,47 +381,54 @@ elif [ ${step} = "anal" ]; then fi fi fi - export npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal export nth_cycle=${nth_anal} - export npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export is_exclusive=True -elif [ ${step} = "analcalc" ]; then +elif [[ ${step} = "analcalc" ]]; then export 
wtime_analcalc="00:10:00" export npe_analcalc=127 - export ntasks=${npe_analcalc} + export ntasks="${npe_analcalc}" export nth_analcalc=1 export nth_echgres=4 export nth_echgres_gfs=12 - export npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc export is_exclusive=True -elif [ ${step} = "analdiag" ]; then +elif [[ ${step} = "analdiag" ]]; then export wtime_analdiag="00:15:00" export npe_analdiag=96 # Should be at least twice npe_ediag export nth_analdiag=1 - export npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag export memory_analdiag="48GB" -elif [ ${step} = "sfcanl" ]; then +elif [[ ${step} = "sfcanl" ]]; then export wtime_sfcanl="00:10:00" export npe_sfcanl=6 export nth_sfcanl=1 - export npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl export is_exclusive=True -elif [ ${step} = "gldas" ]; then +elif [[ ${step} = "gldas" ]]; then export wtime_gldas="00:10:00" export npe_gldas=112 export nth_gldas=1 - export npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas export npe_gaussian=96 export nth_gaussian=1 - export npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian export is_exclusive=True elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then @@ -507,25 +523,25 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" if [[ "${_CDUMP}" =~ "gfs" ]]; then - declare -x npe_${step}_gfs="${NTASKS_TOT}" - declare -x nth_${step}_gfs=1 # ESMF handles threading for the UFS-weather-model - 
declare -x npe_node_${step}_gfs="${npe_node_max}" + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" else - declare -x npe_${step}="${NTASKS_TOT}" - declare -x nth_${step}=1 # ESMF handles threading for the UFS-weather-model - declare -x npe_node_${step}="${npe_node_max}" + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" fi done case "${CASE}" in "C48" | "C96" | "C192") - declare -x wtime_${step}="00:30:00" - declare -x wtime_${step}_gfs="03:00:00" + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" ;; "C384" | "C768" | "C1152") - declare -x wtime_${step}="01:00:00" - declare -x wtime_${step}_gfs="06:00:00" + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" ;; *) echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" @@ -536,20 +552,25 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then unset _CDUMP _CDUMP_LIST unset NTASKS_TOT -elif [ ${step} = "ocnpost" ]; then +elif [[ ${step} = "ocnpost" ]]; then export wtime_ocnpost="00:30:00" export npe_ocnpost=1 export npe_node_ocnpost=1 export nth_ocnpost=1 export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi -elif [ ${step} = "post" ]; then +elif [[ ${step} = "post" ]]; then export wtime_post="00:12:00" export wtime_post_gfs="01:00:00" export npe_post=126 - res=$(echo ${CASE} | cut -c2-) + res=$(echo "${CASE}" | cut -c2-) if (( npe_post > res )); then export npe_post=${res} fi @@ -561,7 +582,7 @@ elif [ ${step} = "post" ]; then if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi export
is_exclusive=True -elif [ ${step} = "wafs" ]; then +elif [[ ${step} = "wafs" ]]; then export wtime_wafs="00:30:00" export npe_wafs=1 @@ -569,7 +590,7 @@ elif [ ${step} = "wafs" ]; then export nth_wafs=1 export memory_wafs="1GB" -elif [ ${step} = "wafsgcip" ]; then +elif [[ ${step} = "wafsgcip" ]]; then export wtime_wafsgcip="00:30:00" export npe_wafsgcip=2 @@ -577,39 +598,43 @@ elif [ ${step} = "wafsgcip" ]; then export npe_node_wafsgcip=1 export memory_wafsgcip="50GB" -elif [ ${step} = "wafsgrib2" ]; then +elif [[ ${step} = "wafsgrib2" ]]; then export wtime_wafsgrib2="00:30:00" export npe_wafsgrib2=18 export nth_wafsgrib2=1 - export npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 export memory_wafsgrib2="80GB" -elif [ ${step} = "wafsblending" ]; then +elif [[ ${step} = "wafsblending" ]]; then export wtime_wafsblending="00:30:00" export npe_wafsblending=1 export nth_wafsblending=1 - export npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending export memory_wafsblending="15GB" -elif [ ${step} = "wafsgrib20p25" ]; then +elif [[ ${step} = "wafsgrib20p25" ]]; then export wtime_wafsgrib20p25="00:30:00" export npe_wafsgrib20p25=11 export nth_wafsgrib20p25=1 - export npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 export memory_wafsgrib20p25="80GB" -elif [ ${step} = "wafsblending0p25" ]; then +elif [[ ${step} = "wafsblending0p25" ]]; then export wtime_wafsblending0p25="00:30:00" export npe_wafsblending0p25=1 export nth_wafsblending0p25=1 - export npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + npe_node_wafsblending0p25=$(echo "${npe_node_max} / 
${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 export memory_wafsblending0p25="15GB" -elif [ ${step} = "vrfy" ]; then +elif [[ ${step} = "vrfy" ]]; then export wtime_vrfy="03:00:00" export wtime_vrfy_gfs="06:00:00" @@ -629,9 +654,10 @@ elif [[ "${step}" = "fit2obs" ]]; then export npe_fit2obs=3 export nth_fit2obs=1 export npe_node_fit2obs=1 + export memory_fit2obs="20G" if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi -elif [ ${step} = "metp" ]; then +elif [[ "${step}" = "metp" ]]; then export nth_metp=1 export wtime_metp="03:00:00" @@ -642,7 +668,7 @@ elif [ ${step} = "metp" ]; then export npe_node_metp_gfs=4 export is_exclusive=True -elif [ ${step} = "echgres" ]; then +elif [[ ${step} = "echgres" ]]; then export wtime_echgres="00:10:00" export npe_echgres=3 @@ -652,7 +678,7 @@ elif [ ${step} = "echgres" ]; then export memory_echgres="200GB" fi -elif [ ${step} = "init" ]; then +elif [[ ${step} = "init" ]]; then export wtime_init="00:30:00" export npe_init=24 @@ -660,14 +686,14 @@ elif [ ${step} = "init" ]; then export npe_node_init=6 export memory_init="70G" -elif [ ${step} = "init_chem" ]; then +elif [[ ${step} = "init_chem" ]]; then export wtime_init_chem="00:30:00" export npe_init_chem=1 export npe_node_init_chem=1 export is_exclusive=True -elif [ ${step} = "mom6ic" ]; then +elif [[ ${step} = "mom6ic" ]]; then export wtime_mom6ic="00:30:00" export npe_mom6ic=24 @@ -680,12 +706,12 @@ elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then eval "export npe_${step}=1" eval "export npe_node_${step}=1" eval "export nth_${step}=1" - eval "export memory_${step}=2048M" + eval "export memory_${step}=4096M" if [[ "${machine}" = "WCOSS2" ]]; then eval "export memory_${step}=50GB" fi -elif [ ${step} = "coupled_ic" ]; then +elif [[ ${step} = "coupled_ic" ]]; then export wtime_coupled_ic="00:15:00" export npe_coupled_ic=1 @@ -693,34 +719,39 @@ elif [ ${step} = "coupled_ic" ]; then export nth_coupled_ic=1 export 
is_exclusive=True -elif [ ${step} = "atmensanalprep" ]; then +elif [[ ${step} = "atmensanalprep" ]]; then export wtime_atmensanalprep="00:10:00" export npe_atmensanalprep=1 export nth_atmensanalprep=1 - export npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) + npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) + export npe_node_atmensanalprep export is_exclusive=True -elif [ ${step} = "atmensanalrun" ]; then +elif [[ ${step} = "atmensanalrun" ]]; then # make below case dependent later export layout_x=2 export layout_y=3 export wtime_atmensanalrun="00:30:00" - export npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanalrun + npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanalrun_gfs export nth_atmensanalrun=1 export nth_atmensanalrun_gfs=${nth_atmensanalrun} export is_exclusive=True - export npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) + npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) + export npe_node_atmensanalrun -elif [ ${step} = "atmensanalpost" ]; then +elif [[ ${step} = "atmensanalpost" ]]; then export wtime_atmensanalpost="00:30:00" export npe_atmensanalpost=${npe_node_max} export nth_atmensanalpost=1 - export npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) + npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) + export npe_node_atmensanalpost export is_exclusive=True elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then @@ -737,7 +768,8 @@ elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export npe_eomg=${npe_eobs} export nth_eobs=2 export nth_eomg=${nth_eobs} - export npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + npe_node_eobs=$(echo "${npe_node_max} / 
${nth_eobs}" | bc) + export npe_node_eobs export npe_node_eomg=${npe_node_eobs} export is_exclusive=True #The number of tasks and cores used must be the same for eobs @@ -746,70 +778,75 @@ elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export npe_node_eobs=10 fi -elif [ ${step} = "ediag" ]; then +elif [[ ${step} = "ediag" ]]; then export wtime_ediag="00:15:00" export npe_ediag=48 export nth_ediag=1 - export npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag export memory_ediag="30GB" -elif [ ${step} = "eupd" ]; then +elif [[ ${step} = "eupd" ]]; then export wtime_eupd="00:30:00" - if [ ${CASE} = "C768" ]; then + if [[ ${CASE} = "C768" ]]; then export npe_eupd=480 export nth_eupd=6 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 fi - elif [ ${CASE} = "C384" ]; then + elif [[ ${CASE} = "C384" ]]; then export npe_eupd=270 export nth_eupd=2 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 - elif [[ "${machine}" = "HERA" ]]; then + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then export nth_eupd=8 - fi - if [[ ${machine} = "S4" ]]; then + elif [[ ${machine} = "S4" ]]; then export npe_eupd=160 export nth_eupd=2 fi elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eupd=42 export nth_eupd=2 - if [[ "${machine}" = "HERA" ]]; then + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then export nth_eupd=4 fi fi - export npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd export is_exclusive=True -elif [ ${step} = "ecen" ]; then +elif [[ ${step} = "ecen" ]]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=4 if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then 
export nth_ecen=2; fi - export npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen export nth_cycle=${nth_ecen} - export npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export is_exclusive=True -elif [ ${step} = "esfc" ]; then +elif [[ ${step} = "esfc" ]]; then export wtime_esfc="00:06:00" export npe_esfc=80 export nth_esfc=1 - export npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc export nth_cycle=${nth_esfc} - export npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle export memory_esfc="80GB" -elif [ ${step} = "epos" ]; then +elif [[ ${step} = "epos" ]]; then export wtime_epos="00:15:00" export npe_epos=80 @@ -817,10 +854,11 @@ elif [ ${step} = "epos" ]; then if [[ "${machine}" == "HERA" ]]; then export nth_epos=6 fi - export npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos export is_exclusive=True -elif [ ${step} = "postsnd" ]; then +elif [[ ${step} = "postsnd" ]]; then export wtime_postsnd="02:00:00" export npe_postsnd=40 @@ -828,12 +866,14 @@ elif [ ${step} = "postsnd" ]; then export npe_node_postsnd=10 export npe_postsndcfp=9 export npe_node_postsndcfp=1 - if [[ "$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc)" -gt "${npe_node_max}" ]]; then - export npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd fi export is_exclusive=True -elif [ ${step} = "awips" 
]; then +elif [[ ${step} = "awips" ]]; then export wtime_awips="03:30:00" export npe_awips=1 @@ -841,7 +881,7 @@ elif [ ${step} = "awips" ]; then export nth_awips=1 export memory_awips="3GB" -elif [ ${step} = "gempak" ]; then +elif [[ ${step} = "gempak" ]]; then export wtime_gempak="03:00:00" export npe_gempak=2 diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy index 1cf08f97c8..7707f99f89 100644 --- a/parm/config/config.vrfy +++ b/parm/config/config.vrfy @@ -6,7 +6,7 @@ echo "BEGIN: config.vrfy" # Get task specific resources -. $EXPDIR/config.resources vrfy +. "${EXPDIR}/config.resources" vrfy export CDFNL="gdas" # Scores verification against GDAS/GFS analysis export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification @@ -22,40 +22,40 @@ export RUNMOS="NO" # whether to run entire MOS package # Minimization, Radiance and Ozone Monitoring #---------------------------------------------------------- -if [ $VRFYRAD = "YES" -o $VRFYMINMON = "YES" -o $VRFYOZN = "YES" ]; then +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then export envir="para" - export COM_IN=$ROTDIR + export COM_IN=${ROTDIR} # Radiance Monitoring - if [[ "$VRFYRAD" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - export RADMON_SUFFIX=$PSLOT - export TANKverf="$NOSCRUB/monitor/radmon" - export VRFYRADSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD" + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" fi # Minimization Monitoring - if [[ "$VRFYMINMON" = "YES" ]] ; then - - export MINMON_SUFFIX=$PSLOT - export M_TANKverf="$NOSCRUB/monitor/minmon" - if [[ "$CDUMP" = "gdas" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON" - elif [[ "$CDUMP" = "gfs" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGFS_ATMOS_VMINMON" + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + 
export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" fi fi # Ozone Monitoring - if [[ "$VRFYOZN" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - export HOMEgfs_ozn="$HOMEgfs" - export OZNMON_SUFFIX=$PSLOT - export TANKverf_ozn="$NOSCRUB/monitor/oznmon" - export VRFYOZNSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN" + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" fi @@ -69,33 +69,35 @@ fi export ens_tracker_ver=v1.1.15.5 export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} -if [ "$VRFYTRAK" = "YES" ]; then +if [[ "${VRFYTRAK}" = "YES" ]]; then - export TRACKERSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_TRACKER" - export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} - if [ "$CDUMP" = "gdas" ]; then + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then export FHOUT_CYCLONE=3 - export FHMAX_CYCLONE=$FHMAX + export FHMAX_CYCLONE=${FHMAX} else export FHOUT_CYCLONE=6 - export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? 
FHMAX_GFS : 240 )) + export FHMAX_CYCLONE fi fi -if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then - export GENESISSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_GENESIS" + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" fi -if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then - export GENESISFSU="$HOMEgfs/jobs/JGFS_ATMOS_FSU_GENESIS" + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" fi -if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then - if [ "$machine" = "HERA" ] ; then + if [[ "${machine}" = "HERA" ]] ; then export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" else echo "WARNING: MOS package is not enabled on ${machine}!" diff --git a/sorc/build_all.sh b/sorc/build_all.sh index f3cad05ffd..af15be7b1f 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -90,9 +90,9 @@ fi source ./partial_build.sh ${_verbose_opt} ${_partial_opt} # shellcheck disable= +# Disable gldas on Jet if [[ ${MACHINE_ID} =~ jet.* ]]; then - Build_gldas="false" - Build_ww3_prepost="false" + Build_gldas="false" fi #------------------------------------ diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 3f34d9eaa3..4d53cf2d1f 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -157,7 +157,7 @@ mkdir -p "${logdir}" errs=0 checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "0b8ff56" ; errs=$((errs + $?)) checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "4e673bf" ; errs=$((errs + $?)) -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-c22aaad}" ; errs=$((errs + $?)) +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-0c8e74c}" ; errs=$((errs + $?)) checkout "verif-global.fd" 
"https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) if [[ ${checkout_gsi} == "YES" ]]; then diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 9250c89888..647722b7a3 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -50,7 +50,7 @@ fi if [[ -d /lfs/f1 ]] ; then # We are on NOAA Cactus or Dogwood MACHINE_ID=wcoss2 -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet MACHINE_ID=jet elif [[ -d /scratch1 ]] ; then diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index c64db3196d..2899e69514 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -10,15 +10,15 @@ fi ulimit_s=$( ulimit -S -s ) # Find module command and purge: -source "$HOMEgfs/modulefiles/module-setup.sh.inc" +source "${HOMEgfs}/modulefiles/module-setup.sh.inc" # Load our modules: -module use "$HOMEgfs/modulefiles" +module use "${HOMEgfs}/modulefiles" if [[ -d /lfs/f1 ]]; then # We are on WCOSS2 (Cactus or Dogwood) module load module_base.wcoss2 -elif [[ -d /lfs3 ]] ; then +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet module load module_base.jet elif [[ -d /scratch1 ]] ; then @@ -43,7 +43,7 @@ fi module list # Restore stack soft limit: -ulimit -S -s "$ulimit_s" +ulimit -S -s "${ulimit_s}" unset ulimit_s set_trace diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml new file mode 100644 index 0000000000..903213b761 --- /dev/null +++ b/workflow/hosts/jet.yaml @@ -0,0 +1,23 @@ +BASE_GIT: '/lfs4/HFIP/hfv3gfs/glopara/git' +DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' +PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' +COMROOT: '/lfs4/HFIP/hfv3gfs/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/lfs4/HFIP/hfv3gfs/${USER}' +STMP: '/lfs4/HFIP/hfv3gfs/${USER}/stmp' +PTMP: '/lfs4/HFIP/hfv3gfs/${USER}/ptmp' +NOSCRUB: $HOMEDIR +ACCOUNT: hfv3gfs +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: kjet +PARTITION_SERVICE: service 
+CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C384', 'C192', 'C96', 'C48']