Skip to content

Commit

Permalink
GDAS initialization scripts
Browse files Browse the repository at this point in the history
Baseline set of utility scripts to start a
GDAS/ENKF cycle using FV3GFS data as input. 
The scripts pull the needed data from HPSS and 
run the chgres_cube program for the high-res 
and ENKF members.

chgres_cube - Fix three argument mismatches in call to
error handler routine (see #69 for details).  Update Cray 
build module to point to my own copy of the wgrib2 api
instead of Dusan's.  Point to official version of ESMF v8
on Cray and Dell.

This commit references #33.
  • Loading branch information
GeorgeGayno-NOAA authored Feb 28, 2020
1 parent aeca779 commit 5b422ce
Show file tree
Hide file tree
Showing 15 changed files with 1,117 additions and 12 deletions.
15 changes: 9 additions & 6 deletions modulefiles/chgres_cube.wcoss_cray
Original file line number Diff line number Diff line change
Expand Up @@ -7,20 +7,23 @@ module rm intel
module load intel/16.3.210
module load cray-mpich/7.2.0
module load craype-haswell
module load cray-netcdf
module load cray-netcdf/4.3.3.1
module load cray-hdf5/1.8.14
module load w3nco-intel/2.0.6
module load nemsio-intel/2.2.3
module load bacio-intel/2.0.2
module load sp-intel/2.0.2
module load sigio-intel/2.1.0
module load sfcio-intel/1.0.0

# module use /gpfs/hps3/emc/nems/noscrub/emc.nemspara/soft/modulefiles
# module load esmf/7.1.0r
export ESMFMKFILE=/gpfs/hps3/emc/global/noscrub/George.Gayno/esmf/8_0_0_bs20/lib/esmf.mk
module use /gpfs/hps3/emc/nems/noscrub/emc.nemspara/soft/modulefiles
module load esmf/8.0.0
module rm gcc
module load gcc/4.9.2

export WGRIB2API_INC=/gpfs/hps3/emc/meso/save/Dusan.Jovic/wgrib2/include
export WGRIB2_LIB=/gpfs/hps3/emc/meso/save/Dusan.Jovic/wgrib2/lib/libwgrib2.a

export WGRIB2API_INC=/gpfs/hps3/emc/global/noscrub/George.Gayno/wgrib2/include
export WGRIB2_LIB=/gpfs/hps3/emc/global/noscrub/George.Gayno/wgrib2/lib/libwgrib2.a

export FCOMP=ftn
export FFLAGS="-O3 -fp-model precise -g -r8 -i4 -qopenmp -convert big_endian -assume byterecl"
Expand Down
5 changes: 3 additions & 2 deletions modulefiles/chgres_cube.wcoss_dell_p3
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@
module load ips/18.0.1.163
module load impi/18.0.1
module load NetCDF/4.5.0
# module load ESMF/7_1_0r
export ESMFMKFILE=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/esmf_lib/8_0_0bs20/lib/libO/Linux.intel.64.intelmpi.default/esmf.mk
module load w3nco/2.0.6
module load sp/2.0.2
module load nemsio/2.2.3
module load bacio/2.0.2
module load sfcio/1.0.0
module load sigio/2.1.0

module use /gpfs/dell2/emc/modeling/noscrub/emc.nemspara/soft/modulefiles
module load esmf/8.0.0

export WGRIB2API_INC=/u/Wesley.Ebisuzaki/home/grib2.v2.0.8.intel/lib
export WGRIB2_LIB=/u/Wesley.Ebisuzaki/home/grib2.v2.0.8.intel/lib/libwgrib2.a

Expand Down
2 changes: 1 addition & 1 deletion reg_tests/chgres_cube/driver.cray.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ module rm intel
module load intel/16.3.210
module load cray-mpich/7.2.0
module load craype-haswell
module load cray-netcdf
module load cray-netcdf/4.3.3.1
module load xt-lsfhpc/9.1.3
module list

Expand Down
4 changes: 2 additions & 2 deletions sorc/chgres_cube.fd/input_data.F90
Original file line number Diff line number Diff line change
Expand Up @@ -5531,7 +5531,7 @@ subroutine handle_grib_error(vname,lev,method,value,varnum, iret,var,var8,var3d)
else
call error_handler("ERROR USING MISSING_VAR_METHOD. PLEASE SET VALUES IN" // &
" VARMAP TABLE TO ONE OF: set_to_fill, set_to_NaN,"// &
" , skip, or stop.")
" , skip, or stop.", 1)
endif

end subroutine handle_grib_error
Expand Down Expand Up @@ -5561,7 +5561,7 @@ subroutine read_grib_soil(the_file,inv_file,vname,vname_file,dummy3d,rc)
':0.4-1 m below ground:', ':1-2 m below ground:'/)
else
rc = -1
call error_handler("reading soil levels. File must have 4 soil levels.")
call error_handler("reading soil levels. File must have 4 soil levels.", rc)
endif

call get_var_cond(vname,this_miss_var_method=method,this_miss_var_value=value, &
Expand Down
2 changes: 1 addition & 1 deletion sorc/chgres_cube.fd/program_setup.f90
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ subroutine read_setup_namelist

if (trim(input_type) == "grib2") then
if (trim(grib2_file_input_grid) == "NULL" .or. trim(grib2_file_input_grid) == "") then
call error_handler("FOR GRIB2 DATA, PLEASE PROVIDE GRIB2_FILE_INPUT_GRID")
call error_handler("FOR GRIB2 DATA, PLEASE PROVIDE GRIB2_FILE_INPUT_GRID", 1)
endif
endif

Expand Down
80 changes: 80 additions & 0 deletions util/gdas_init/config
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
#-----------------------------------------------------------
#
# 1) Compile the chgres_cube program.  Invoke
#    ./sorc/build_chgres_cube.sh
#
# 2) Ensure links to the 'fixed' directories are
#    set.  See the ./sorc/link_fixdirs.sh script prolog
#    for details.
#
# 3) Set all config variables.  See definitions
#    below.
#
# 4) Invoke the driver script for your machine (with no
#    arguments).
#
# Variable definitions:
# --------------------
# EXTRACT_DIR  - directory where data extracted from HPSS
#                is stored.
# EXTRACT_DATA - Set to 'yes' to extract data from HPSS.
#                If data has been extracted and is located
#                in EXTRACT_DIR, set to 'no'.
# RUN_CHGRES   - To run chgres, set to 'yes'.  To extract
#                data only, set to 'no'.
# yy/mm/dd/hh  - The year/month/day/hour of your desired
#                experiment.  Currently, does not support
#                pre-ENKF GFS data, prior to
#                2012 May 21 00z.
# LEVS         - Number of hybrid levels plus 1.  To
#                run with 64 levels, set LEVS to 65.
# CRES_HIRES   - Resolution of the hires component of
#                your experiment.
# CRES_ENKF    - Resolution of the enkf component of
#                your experiment.
# UFS_DIR      - Location of your checked out UFS_UTILS
#                repo.
# OUTDIR       - Directory where data output from chgres is stored.
#
#-----------------------------------------------------------

EXTRACT_DIR="/gpfs/dell1/stmp/$USER/gdas.init/input"
EXTRACT_DATA=yes

RUN_CHGRES=yes

yy=2017
mm=07
dd=19
hh=18

LEVS=65

CRES_HIRES=C192
CRES_ENKF=C96

UFS_DIR="$PWD/../.."

OUTDIR="/gpfs/dell1/stmp/$USER/gdas.init/output"

#---------------------------------------------------------
# Don't touch anything below here.
#---------------------------------------------------------

gfs_ver=v15

# Select the GFS version that produced the archived data for the
# requested date.  The yy/mm/dd/hh pieces concatenate to a ten-digit
# integer (YYYYMMDDHH) suitable for numeric comparison.
# No ENKF data prior to 2012/05/21/00z.
if [ "$yy$mm$dd$hh" -lt 2012052100 ]; then
  set +x
  echo "FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA"
  exit 2
elif [ "$yy$mm$dd$hh" -lt 2016051000 ]; then
  gfs_ver=v12
elif [ "$yy$mm$dd$hh" -lt 2017072000 ]; then
  gfs_ver=v13
elif [ "$yy$mm$dd$hh" -lt 2019061200 ]; then
  gfs_ver=v14
fi

export EXTRACT_DIR yy mm dd hh UFS_DIR OUTDIR CRES_HIRES CRES_ENKF RUN_CHGRES
export LEVS gfs_ver
147 changes: 147 additions & 0 deletions util/gdas_init/driver.cray.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
#!/bin/bash

#----------------------------------------------------------------------
# Driver script for running on Cray.
#
# Edit the 'config' file before running.
#----------------------------------------------------------------------

set -x

# Load the compiler, LSF and HPSS modules needed to submit jobs and
# pull archived data on WCOSS-Cray.
module purge
module load PrgEnv-intel/5.2.56
module rm intel
module load intel/16.3.210
module load cray-mpich/7.2.0
module load craype-haswell
module load cray-netcdf/4.3.3.1
module load xt-lsfhpc/9.1.3
module load prod_util/1.1.0
module load hpss/4.1.0.3
module list

# LSF project code charged for all submitted jobs.
PROJECT_CODE=GFS-DEV

# Pull in user settings: EXTRACT_DATA, EXTRACT_DIR, RUN_CHGRES,
# CRES_HIRES, gfs_ver, etc.  See the 'config' file prolog.
source config

#----------------------------------------------------------------------
# Phase 1: submit jobs that pull the input data from HPSS - one job
# per tarball group.  Which extract script is used depends on the GFS
# version (gfs_ver) that produced the archived data.
#----------------------------------------------------------------------

if [ $EXTRACT_DATA == yes ]; then

rm -fr $EXTRACT_DIR
mkdir -p $EXTRACT_DIR

# HPSS access requires the 'dev_transfer' queue.
QUEUE=dev_transfer

MEM=6000
WALLT="2:00"

case $gfs_ver in
v12 | v13)
bsub -o log.data.hires -e log.data.hires -q $QUEUE -P $PROJECT_CODE -J get.data.hires -W $WALLT \
-R "rusage[mem=$MEM]" "./get_pre-v14.data.sh hires"
bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \
-R "rusage[mem=$MEM]" "./get_pre-v14.data.sh enkf"
# NOTE: DEPEND is deliberately used UNQUOTED in the bsub calls below
# so '-w' and its dependency expression split into separate words.
DEPEND="-w ended(get.data.*)"
;;
v14)
bsub -o log.data.hires -e log.data.hires -q $QUEUE -P $PROJECT_CODE -J get.data.hires -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v14.data.sh hires"
bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v14.data.sh enkf"
DEPEND="-w ended(get.data.*)"
;;
v15)
# v15 enkf member data is archived in eight tarball groups
# (grp1..grp8), each extracted by its own transfer job.
bsub -o log.data.hires -e log.data.hires -q $QUEUE -P $PROJECT_CODE -J get.data.hires -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh hires"
bsub -o log.data.grp1 -e log.data.grp1 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf1 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp1"
bsub -o log.data.grp2 -e log.data.grp2 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf2 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp2"
bsub -o log.data.grp3 -e log.data.grp3 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf3 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp3"
bsub -o log.data.grp4 -e log.data.grp4 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf4 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp4"
bsub -o log.data.grp5 -e log.data.grp5 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf5 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp5"
bsub -o log.data.grp6 -e log.data.grp6 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf6 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp6"
bsub -o log.data.grp7 -e log.data.grp7 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf7 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp7"
bsub -o log.data.grp8 -e log.data.grp8 -q $QUEUE -P $PROJECT_CODE -J get.data.enkf8 -W $WALLT \
-R "rusage[mem=$MEM]" "./get_v15.data.sh grp8"
DEPEND="-w ended(get.data.*)"
;;
esac

else

# Data already extracted: chgres jobs have no dependency.  A single
# space (not empty) keeps the unquoted $DEPEND expansion harmless.
DEPEND=' '

fi

#----------------------------------------------------------------------
# Phase 2: submit the chgres_cube jobs - first the hires member, then
# the 80 enkf members - each held (via DEPEND) until the data-extract
# jobs above have ended.
#----------------------------------------------------------------------

if [ $RUN_CHGRES == yes ]; then
MEM=2000
QUEUE=dev
MEMBER=hires
WALLT="0:15"
NUM_NODES=1
# Pre-v14 (spectral GFS) input needs threading; see run scripts.
# NOTE(review): rationale for 2 threads + 1024M stack inferred from
# version split only - confirm against the run_pre-v14 script.
case $gfs_ver in
v12 | v13)
export OMP_NUM_THREADS=2
export OMP_STACKSIZE=1024M
;;
*)
export OMP_NUM_THREADS=1
;;
esac
# Scale task count and node count with the hires target resolution.
export APRUN="aprun -j 1 -n 12 -N 12 -d ${OMP_NUM_THREADS} -cc depth"
if [ $CRES_HIRES == 'C768' ] ; then
WALLT="0:20"
NUM_NODES=3
export APRUN="aprun -j 1 -n 36 -N 12 -d ${OMP_NUM_THREADS} -cc depth"
elif [ $CRES_HIRES == 'C1152' ] ; then
WALLT="0:20"
NUM_NODES=4
export APRUN="aprun -j 1 -n 48 -N 12 -d ${OMP_NUM_THREADS} -cc depth"
fi
# Submit the hires chgres job.  NODES is consumed by the run script.
case $gfs_ver in
v12 | v13)
bsub -e log.${MEMBER} -o log.${MEMBER} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_pre-v14.chgres.sh ${MEMBER}"
;;
v14)
bsub -e log.${MEMBER} -o log.${MEMBER} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v14.chgres.sh ${MEMBER}"
;;
v15)
bsub -e log.${MEMBER} -o log.${MEMBER} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v15.chgres.sh ${MEMBER}"
;;
esac

# Enkf members always run on one node at the (smaller) enkf
# resolution, so reset the resources chosen for hires above.
WALLT="0:15"
NUM_NODES=1
export APRUN="aprun -j 1 -n 12 -N 12 -d ${OMP_NUM_THREADS} -cc depth"
# Submit one chgres job per enkf member, 001 through 080
# (MEMBER_CH is the member number zero-padded to three digits).
MEMBER=1
while [ $MEMBER -le 80 ]; do
if [ $MEMBER -lt 10 ]; then
MEMBER_CH="00${MEMBER}"
else
MEMBER_CH="0${MEMBER}"
fi
case $gfs_ver in
v12 | v13)
bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_pre-v14.chgres.sh ${MEMBER_CH}"
;;
v14)
bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v14.chgres.sh ${MEMBER_CH}"
;;
v15)
bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \
-extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v15.chgres.sh ${MEMBER_CH}"
;;
esac
MEMBER=$(( $MEMBER + 1 ))
done
fi
Loading

0 comments on commit 5b422ce

Please sign in to comment.