#!/bin/sh
########################################
# NHC_GUIDANCE_CLIPER5 Preliminary data setup step
#
# J-job style setup: establishes the working directory, storm identity,
# COM input/output paths, and support directories, then hands off to
# ${SCRnhcg}/exnhc_guidance.sh.
# NOTE(review): script uses `[[ ]]` below, so /bin/sh is assumed to be
# ksh/bash-compatible (true on WCOSS) — confirm before porting.
########################################
set -ax
#####################################################################################
# Run config file to get input parameters
# This config file should define the following variables
# DATA_IN: Location of working directory, default to /tmpnwprd2
# DEV_ECF: If the job is to be running using ECF, default to YES
# SENDDBN: Set to NO for developers, default to YES
# COM_IN: Directory for input files, default to /com/$NET/${envir}
# COM_OUT: Directory for output file, default to /com/$NET/${envir}
# gespath: Directory for the guess or restart files, default to /nwges/${envir}
#####################################################################################

###############################
# Specify NET and RUN name
##############################
export NET=${NET:-nhcg}
export RUN=${RUN:-nhcg}     ## TBD
export MODEL=${MODEL:-nhcg} ## TBD
export CYC=${CYC:-${cyc}}

###################################
# stormid
###################################
#export stormid=${stormid:-al122014}
export stormlabel=${stormlabel:-storm01}

echo `date` $0 `date -u` begin
export PS4='$SECONDS + '

###########################################################
# obtain unique process id (pid) and make temp directories
###########################################################
export pid=$$
export DATA=${DATA:-${DATAROOT}/${jobid}}
export WORK_DIR=$DATA

# Remove old directory if it exists
# rm -rf $WORK_DIR
mkdir -p $WORK_DIR
# Retry until the directory actually exists (guards against slow/shared
# filesystem creation races).
while [ ! -d "$WORK_DIR" ]
do
  mkdir -p $WORK_DIR
  sleep 1
done
cd $WORK_DIR

####################################
# Determine Job Output Name on System
####################################
export outid="LL$job"
export pgmout="OUTPUT.${pid}"
export cycle=t${cyc}z

##################################################
# SENDECF  - Flag Events on ECF
# SENDCOM  - Copy Files From TMPDIR to $COMOUT
# SENDDBN  - Issue DBNet Client Calls
# RERUN    - Rerun fcst from beginning (default no)
# SEND2NHC - send a .com file to NHC
##################################################
export SENDCOM=${SENDCOM:-YES}
export SENDECF=${SENDECF:-YES}
export SENDDBN=${SENDDBN:-YES}
export SEND2NHC=${SEND2NHC:-YES}

# NOTE(review): this uses lowercase ${model} while the script exports
# uppercase MODEL above — presumably ${model} is supplied by ecFlow/the
# calling job; verify, otherwise HOMEnhcg resolves with an empty name.
export HOMEnhcg=${HOMEnhcg:-${PACKAGEROOT}/${model}.${nhcg_ver}}
export EXECnhcg=${EXECnhcg:-$HOMEnhcg/exec}
export FIXnhcg=${FIXnhcg:-$HOMEnhcg/fix}
export PARMnhcg=${PARMnhcg:-$HOMEnhcg/parm}
export USHnhcg=${USHnhcg:-$HOMEnhcg/ush}
export SCRnhcg=${SCRnhcg:-$HOMEnhcg/scripts}

# JY machine=`hostname |cut -c1`
machine=`echo $SITE |cut -c1`
# Quoted operands and `||` instead of the obsolescent `-o`: the original
# unquoted test was a syntax error whenever $SITE was empty/unset.
if [ "$machine" = L ] || [ "$machine" = S ]; then
  export MACHINE=wcoss
fi

#############################################
# II. Model Types
# 1. Simple track/intensity guidance (no changes from year to year):
#    CLIPER/SHIFOR/EXTRAP/OCD5/NCEPDATA/NCEPDATA-DEVEL
#    Input: ATCF data
#    Output: ATCF data
#
# 2. Track forecast guidance (Usually no changes from year to year):
#    TABS/TABM/TABD/UKMet
#    Input: ATCF data, GFS model forecasts, UKmet input file
#    Output: ATCF data
#
# 3. Advanced intensity models (Updates by May 15 each year):
#    SHIPS/LGEM
#    Input: ATCF data, GFS model forecasts, GOES imagery, SST analyses, Ocean Heat Content analysis
#    Output: ATCF data, text products
#
#############################################
export TYPE1=${TYPE1:-YES}; export TYPE2=${TYPE2:-YES}; export TYPE3=${TYPE3:-YES};

#############################################
# Define primary root storm directories
# atcfdir  - primary root storm directory
# stormdir - storm directory
# zcomdir  - carq pickoff directory for production execution
#############################################
export atcfdir=${atcfdir:-/lfs/h1/ops/prod/dcom/nhc/atcf}
export zcomdir=${zcomdir:-${atcfdir}/zcom}

#############################################
# COMIN
#############################################
export COM_IN=${COM_IN:-$(compath.py ${envir}/com/${NET}/${nhcg_ver})}
export COMIN=${COMIN:-${COM_IN}/storm-data}
export GFSDIR=${GFSDIR:-$(compath.py prod/com/gfs/${gfs_ver})}
export TRACKhwrf=${TRACKhwrf:-$(compath.py prod/com/hwrf/${hwrf_ver})}
export TRACKhmon=${TRACKhmon:-$(compath.py prod/com/hmon/${hmon_ver})}
export TRACKhafs=${TRACKhafs:-$(compath.py prod/com/hafs/${hafs_ver})}
export TRACKglob=${TRACKglob:-$(compath.py prod/com/ens_tracker/${ens_tracker_ver})}
# JY export nhcatcfIPdir=${nhcatcfIPdir:-/nhc/save/guidance/storm-data/ncep/return_atcfIP}
export nhcatcfIPdir=${nhcatcfIPdir:-${atcfdir}/return_atcfIP}
#export COMINnhc=${COMINnhc:-/lfs/h1/nhc/nhc/noscrub/data/guidance}
export COMINnhc=${COMINnhc:-${COM_IN}}
export DCOMINnhc=${DCOMINnhc:-${DCOMROOT}/nhc}
export DCOMbase=${DCOMbase:-${DCOMROOT:?}}
# FIX: the original `${DCOMbase:-...}` keyed the default on DCOMbase (always
# set by the previous line), so a pre-set ECMDIR was silently ignored.
# Honor ECMDIR like every other override in this script.
export ECMDIR=${ECMDIR:-${DCOMbase}}

#################################################
# Check the storm txt file from $COMIN/inpdata/
#################################################
if [ -s $COMIN/inpdata/${stormlabel}.txt ]; then
  cp $COMIN/inpdata/${stormlabel}.txt .
  # Field 1: storm id (e.g. al122014); field 2: storm day/time stamp.
  export stormid=`cat ${stormlabel}.txt |awk '{print $1}'`
  export stormdir=${stormdir:-${WORK_DIR}/storm-data/${stormid}}
  export storm_day_time=`cat ${stormlabel}.txt |awk '{print $2}'`
  export storm_tday=`echo $storm_day_time | cut -c 5-8`
  export storm_ttim=`echo $storm_day_time | cut -c 9-10`
else
  # No active storm for this slot — a normal (non-error) completion.
  echo " *** No $COMIN/inpdata/${stormlabel}.txt existed. No storm, exiting ..... *** "
  msg="JOB COMPLETED NORMALLY"
  echo "$msg"
  # JY cd $DATA_IN
  if [ "${KEEPDATA:-NO}" != YES ] ; then rm -rf $DATA ; fi
  date
  exit
fi

#=====================================================================
# Check for the existence of the storm directory.  If not present,
# create it.
if [[ ! -d $stormdir ]]; then
  mkdir -p $stormdir
fi

# Move to the storm directory
cd $stormdir

# Run setpdy and initialize PDY variables
setpdy.sh
. ./PDY

#############################################
# COMOUT
#############################################
export COM_OUT=${COM_OUT:-$(compath.py ${envir}/com/${NET}/${nhcg_ver})}
export COMWRKnhc_hcca=${COMWRKnhc_hcca:-${COM_OUT}/hcca/work}
export COMOUTnhc_hcca=${COMOUTnhc_hcca:-${COM_OUT}/hcca/output}
export COMINnhc_hcca=${COMINnhc_hcca:-${COM_IN}/hcca/input}
# JY export COM_STORM_DATA=${COM_STORM_DATA:-/com/nhc/${envir}/storm-data}
export COM_STORM_DATA=${COM_STORM_DATA:-${COM_OUT}/storm-data}
#export COMOUT=${COMOUT:-${COM_OUT}/storm-data/${stormid}}
export COMOUT=${COMOUT:-${COM_OUT}/guidance.${PDY}/${stormid}}
export atcfIPdir=${atcfIPdir:-${COM_STORM_DATA}/ncep/return_atcfIP}
mkdir -p -m 775 $COMOUT ${atcfIPdir}
#export dbnetdir=${dbnetdir:-${PCOMROOT}/${NET}/${stormlabel}}
export dbnetdir=${dbnetdir:-$(compath.py ${envir}/com/${NET}/${nhcg_ver})/dbnet/${stormlabel}}
mkdir -p -m 775 ${dbnetdir}

##################################
# Set up DBNET environment
###################################

#############################################
# location of programs (executables and scripts)
#############################################
# export prgmsdir=$prgmsdir:-/nhc/save/guidance/prgms}
export prgmsdir=${prgmsdir:-${HOMEnhcg}}
export exedir=${exedir:-${prgmsdir}/exec}
# export scriptdir=${scriptdir:-${prgmsdir}/scripts}
export scriptdir=${scriptdir:-${prgmsdir}/ush}
export utilsdir=${utilsdir:-${prgmsdir}/ush}

#############################################
# location of log directory
#############################################
#export logdir=${logdir:-${atcfdir}/storm-data/log}
export logdir=${logdir:-${WORK_DIR}/storm-data/log}
echo ${logdir}/${stormid}
mkdir -p ${logdir}/${stormid}

#############################################
# input compute filename convention
#############################################
export input=${input:-${stormid}.com}

#############################################
# basin info (parse from com)
#############################################
# First two characters of the storm id, upper-cased (al -> AL, ep -> EP, ...)
export basin=`echo ${stormid} | cut -c1-2 | tr 'a-z' 'A-Z'`

#############################################
# output A-deck filename convention
#############################################
export output=${output:-a${stormid}.dat}

#############################################
# tcvitals - model initialization
#############################################
export ncepfile=${ncepfile:-${COM_OUT}/storm-data/ncep/tcvitals}
# JY export ncepfile=${ncepfile:-${atcfdir}/storm-data/ncep/tcvitals}

#############################################
# CLIPER5 coefficent location
#############################################
#export cliper5coef=${cliper5coef:-${prgmsdir}/coefs}
export cliper5coef=${cliper5coef:-${PARMnhcg}}

#############################################
# EPCLIPER coefficent location
#############################################
#export epclipercoef=${epclipercoef:-${prgmsdir}/coefs}
export epclipercoef=${epclipercoef:-${PARMnhcg}}

env

########################################################
# Check the storm basin: only AL / EP / CP are processed
########################################################
if [[ ${basin} = "AL" || ${basin} = "EP" || ${basin} = "CP" ]]; then
  echo "The basin ${basin} is good for $job to process .... "
  if [ "$basin" = CP ]; then
    # Do not process any CP strom crossed dateline
    dateline_chk=`grep CARQ ${zcomdir}/${input}|tail -n 1 |awk -F"," '{print $8}'|grep -o '.$'`
    # FIX: quoted so an empty grep result is a clean "false" rather than
    # the test syntax error `[ = E ]` the original produced.
    if [ "$dateline_chk" = E ]; then
      echo "$stormid is already pass the dateline, do not process this storm anymore!"
      echo "**** mv ${stormlabel}.go to ${stormlabel}.processed file ****"
      mv ${COM_STORM_DATA}/inpdata/${stormlabel}.go ${COM_STORM_DATA}/inpdata/${stormlabel}.processed
      exit
    fi
  fi
else
  echo "The basin ${basin} is not AL or EP or CP for $job to process, exit ...."
  if [ -s ${COM_STORM_DATA}/inpdata/${stormlabel}.go ]; then
    echo "**** mv ${stormlabel}.go to ${stormlabel}.processed file ****"
    mv ${COM_STORM_DATA}/inpdata/${stormlabel}.go ${COM_STORM_DATA}/inpdata/${stormlabel}.processed
  else
    echo "**** Warning: ${COM_STORM_DATA}/inpdata/${stormlabel}.go is missing *****"
  fi
  echo "**** Remove the $COMOUT since $stormid basin $basin will not be processed ****"
  rm -rf $COMOUT
  date
  exit
fi

########################################################
# Check the storm ${stormid} return_atcfIP file
########################################################
if [ -s ${atcfIPdir}/${stormid}.ip ]; then
  echo " the storm return_atcfIP file ${atcfIPdir}/${stormid}.ip existed ..."
else
  echo " the storm return_atcfIP file ${atcfIPdir}/${stormid}.ip NOT existed ..."
  echo " Something not right, check the NHC ${nhcatcfIPdir}/${stormid}.ip ... "
  if [ -s ${nhcatcfIPdir}/${stormid}.ip ]; then
    cp ${nhcatcfIPdir}/${stormid}.ip ${atcfIPdir}/${stormid}.ip
  else
    # Deliberately non-fatal (err_chk left commented out by the authors).
    echo " NHC storm return_atcfIP file ${nhcatcfIPdir}/${stormid}.ip not existed ... "
    echo " Please check with NHC Storm Guidance Support ...."
    # export err=90; err_chk;
  fi
fi

########################################################
# Execute the script.
# NOTE(review): the exit status of exnhc_guidance.sh is not checked here;
# confirm whether a failure should abort before the COMOUT copy below.
${SCRnhcg}/exnhc_guidance.sh
cat $pgmout

#cp -pr ${logdir} ${COM_OUT}/storm-data/.
cp -pr ${logdir} ${COMOUT}/.
#cp -pr ${COMOUT} ${COM_STORM_DATA}/.
#########################################################

msg="JOB COMPLETED NORMALLY"
echo "$msg"

################################
# Remove the Working Directory
################################
# FIX: DATA_IN is never set by this script (only documented in the header);
# fall back to DATAROOT so we do not silently `cd` to $HOME when it is unset.
cd ${DATA_IN:-$DATAROOT}
if [ "${KEEPDATA:-NO}" != YES ] ; then rm -rf $DATA ; fi
date