#!/bin/ksh set -ax #------------------------------------------------------------------------------ # # NAME: ships_run_combined.ksh # # LANGUAGE: K-shell # # PURPOSE: End to end processing of the SHIPS Intensity Hurricane Model. # Both GFS and ECMWF # # DESCRIPTION: Primary script used to drive the SHIPS Intensity model # processing. # # INFORMATION: # National Hurricane Center / Tropical Prediction Center # Department of Commerce - NOAA / NWS # # CATEGORY: Statistical Model # # CALLING SEQUENCE: ships_run.ksh CCYYYY.com aCCYYYY.dat # # INPUTS: # CCYYYY.com <-- computes CARQs # aCCYYYY.dat <-- A-deck format, containing LBAR,BAMM # or other track model in case OFCL is missing # # OUTPUTS: # # EXTERNAL: # # SIDE EFFECTS: Unknown # # RESTRICTIONS: # # EXAMPLE or USAGE: ships_run_combined.ksh al102003.com 1 # 1 = GFS, 2 = ECMWF # # OTHER NOTES: None # # MODIFICATION HISTORY: # Written by: M. DeMaria and F. Horsfall, 15-MAY-2001 (NOAA-NWS) # Revised by: C. Sisko, 20-MAR-2003 (NOAA-NWS) # Revised by: C. Sisko, 20-DEC-2006 (NOAA-NWS) # # REVISION INFORMATION (RCS Keyword): # Modified April 2002 by MD to remove reference to parallel directory # for AVN files # Modified 7/16/02 by MD to add OHC predictors to lsdiag.dat file # Modified 8/14/02 by MD to add IR profiles # Modified 3/20/03 by CS for port to IBM CCS (Frost/Snow) # Modified 6/09/03 by MD for "leading zero" problems on CCS # Modified 1/20/05 by CS for port to BLUE/WHITE & GFS filenaming convention # Modified 6/15/05 by MD for port to BLUE/WHITE of GOES reader # Modified 6/29/05 by CS for port to Production Environment # Modified 12/20/06 by CS for port to MIST/DEW Environment # Modified 05/15/13 by MD for port to WCOSS # Modified 09/04/14 by MD for 2014 ships run, # includes getting new global SST files (GSST) # Modified 05/03/16 by MD for 2016 SHIPS and new RII. # Modified 11/30/16 by MD for transition to WCOSS Cray. 
# Modified 05/11/16 by SS to include daily Reynolds SST files (dsst), # change OHC file input to new files (eohc) # Modified 06/13/18 by SS to run GFS or ECMWF version based on input # Modified 04/29/19 by SS to specify GFS/FV3/ECM # Modified 04/14/20 by SS to fix eohc file search & make similar to dsst # (coordinated fixes with Galina), GOES data find fixed # Modified 04/27/20 by SS to add dhou definition for EC SHIPS run (bug fix) # Modified 05/05/20 by SS to fix passed env var for EC SHIPS (DCOMbase) # Modified 02/06/23 by MD to skip searching for weekly Reynolds SST files #----------------------------------------------------------------------------------- #--------------------- #... CONFIGURATION ... #--------------------- # --- Operational vs Test mode switch --- # Set ioper=1 if this is the operational version that includes # the grib decoding. # # The IR profiles are also created with ioper=1 # # Set ioper=0 to skip the grib decoding. # # If ioper=0, it is assumed that packed ASCII # files containing the GFS forecasts are already # available, and the anupdatem.ksh script is skipped. # # The script/code for creating the IR profiles is also # skipped with this option. The IR files are assumed to already # be available in the idir directory. # ioper=1 # --- Heat Content Inclusion Switch --- # # Set iohc=1 if Ocean Heat Content data are needed # or iohc=0 if they are not needed iohc=1 #-------------------- #... DEFINE PATHS ... 
#-------------------- # note - user should define these paths #module load prod_util #module load prod_util/1.1.0 # # common path # commonp="/nhc/save/guidance/prgms" commonp=${HOMEnhcg} # # Specify name of directory containing required scripts sdir=${commonp}"/ush/" ### "/scripts/" #sdir=${commonp}"/test/ships/" # # Specify name of directory containing executables xdir=${commonp}"/exec/" # Specify name of directory containing static data files #ydir=${commonp}"/coefs/" ydir=${commonp}"/parm/" # # Specify name of directory containing packed ASCII forecast model files # for diagnositic programs #pdir="/nhc/noscrub/data/guidance/ships/gfs/" ## Need to check ?? #pdir="${COM_STORM_DATA}/ships/gfs/" ## Need to check ?? #if [ ${2} -eq 1 ]; then if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then pdir="${COM_STORM_DATA}/ships/gfs/" ## Need to check ?? fi #if [ ${2} -eq 2 ]; then if [ "${2}" = "ecm" ]; then pdir="${COM_STORM_DATA}/ships/ecm/" fi # # Specify name of directory containing packed SST files #odir="/nhc/noscrub/data/guidance/ships/sst/" ## Need to check ?? odir="${DCOMINnhc}/ships/sst/" ## Need to check ?? # # Specify name of directory containing OHC files #ddir="/nhc/noscrub/data/guidance/ships/ohc/" ## Need to check ?? ddir="${DCOMINnhc}/ships/ohc/" ## Need to check ?? # # Specify name of directory to put ships text output #if [ ${2} -eq 1 ]; then if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then tdir="${COMOUT}/ships/stext/" fi #if [ ${2} -eq 2 ]; then if [ "${2}" = "ecm" ]; then tdir="${COMOUT}/ships/etext/" fi mkdir -p -m 775 ${tdir} # # Specify name of directory for saving IR profiles (IBM) idir="${COMOUT}/ships/irprofs/" ## OUTPUT dir ships/irprofs/ mkdir -p -m 775 ${idir} # Specify the name of the directory contains the # the McIDAS AREA files iidir="${DCOMINnhc}/ships/GOESarea/" ## Need to check ?? 
#
# -- for RI use --
# FIX: stormid/currdtg must be computed BEFORE ldir is built below:
# ldir interpolates ${stormid}, and previously these assignments came
# after it, so the per-storm log directory collapsed to .../log//.
stormid=`echo ${1} | cut -c1-8`
currdtg=`tail -1 ${1} | cut -d',' -f3 | sed -e 's/ *//g'`
#
# Log directory (per storm)
#ldir="/gpfs/hps/nhc/save/guidance/storm-data/log/"
ldir="${WORK_DIR}/storm-data/log/${stormid}/"
mkdir -p -m 775 $ldir
#---------------------------------------
#... EXPORT ENV for COEFS/DATA files ...
#---------------------------------------
#
# Coefs and Data files ... (read by the Fortran executables)
#export SHIPS_COEF="/nhc/save/guidance/prgms/coefs/" ## Need to check ??
export SHIPS_COEF="$HOMEnhcg/parm/"
#------------
#... LOGS ...
#------------
#
# Set LOG equal to the storm ID (basename of the .com file, datestamped)
#if [ ${2} -eq 1 ]; then
if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then
  LOG="SHIPS-"`echo ${1##/*/} | sed -e 's/\.com//'`
fi
#if [ ${2} -eq 2 ]; then
if [ "${2}" = "ecm" ]; then
  LOG="ESHIPS-"`echo ${1##/*/} | sed -e 's/\.com//'`
fi
LOG=${LOG}.`date +"%Y%m%d.%H%M"`".log"
#LOG=${ldir}${LOG}.`date +"%Y%m%d.%H%M"`".log"
# -----------------------------------------------------------
# BEGIN SHIPS PREP : Track, Grib and Input Deck Creation
# -----------------------------------------------------------
echo "-------------------------------------------------------" > $LOG
#if [ ${2} -eq 1 ]; then
if [ "${2}" = "gfs" ]; then
  echo " ***** GFS SHIPS RUN STARTED *****" >> $LOG
fi
if [ "${2}" = "fv3" ]; then
  echo " ***** GFS-FV3 SHIPS RUN STARTED *****" >> $LOG
fi
#if [ ${2} -eq 2 ]; then
if [ "${2}" = "ecm" ]; then
  echo " ***** ECMWF SHIPS RUN STARTED *****" >> $LOG
fi
echo "-------------------------------------------------------">> $LOG
echo "start time: "`date +"%Y/%m/%d %T %Z doy=(%y%j)"` >> $LOG
echo "" >> $LOG
# --------------------------------
# Check if ECMWF SHIPS can run yet
# --------------------------------
# The EC run needs the 180 h ECMWF grib file from the most recent
# 00Z/12Z cycle; map the synoptic hour of currdtg to that cycle.
if [ "${2}" = "ecm" ]; then
  #This section creates variable needed to construct path to grib file
  currdtg_epoch=`date -d "${currdtg:0:8} ${currdtg:8:2}:00:00" "+%s"`
  prevday_epoch=$(( ${currdtg_epoch} - (3600*24) ))
  prevday=`date -d @${prevday_epoch} "+%Y%m%d%H"`
  dhou=`echo $currdtg | cut -c 9-10`
  # 00Z -> previous day's 12Z cycle; 06Z/12Z -> today's 00Z; 18Z -> today's 12Z
  # (any other synoptic hour leaves dirdate/inithr unset and the file
  # test below fails -> script exits 1)
  if [ "${dhou}" -eq "00" ]; then
    dirdate=`echo ${prevday} | cut -c 1-8`
    inithr="12"
  else
    if [ "${dhou}" -eq "06" ] || [ "${dhou}" -eq "12" ]; then
      dirdate=`echo ${currdtg} | cut -c 1-8`
      inithr="00"
    else
      if [ "${dhou}" -eq "18" ]; then
        dirdate=`echo ${currdtg} | cut -c 1-8`
        inithr="12"
      fi
    fi
  fi
  starttime="${dirdate}${inithr}"
  start_epoch=`date -d "${starttime:0:8} ${starttime:8:2}:00:00" "+%s"`
  fhr180_epoch=$(( ${start_epoch} + (3600*180) ))
  fhr180=`date -d @${fhr180_epoch} "+%Y%m%d%H"`
  starttime_cut=`echo ${starttime} | cut -c 5-10`
  fhr180_cut=`echo ${fhr180} | cut -c 5-10`
  ecgribfile="${DCOMbase}/${dirdate}/wgrbbul/ecmwf/DCD${starttime_cut}00${fhr180_cut}001"
  echo "Looking for this 180 h ECMWF grib file before proceeding: ${ecgribfile}" >> $LOG
  if [ -f ${ecgribfile} ]; then
    echo " ECMWF is available now, ESHIPS will run" >> $LOG
  else
    echo " ECMWF is not available yet, ESHIPS & DTOPS(ECMWF) will *NOT* run." >> $LOG
    echo " Halting ESHIPS processing now. " >> $LOG
    exit 1
  fi
fi
# ---------------------------
# Compute and best track info
#
# Create the comab.dat file for the model run
rm -f comab.dat
cabname=${1}
# diagnostic
echo "[${cabname}] [${1}]" >> $LOG
hostname >> $LOG
echo "cp -f $cabname comab.dat" >> $LOG
cp -f $cabname comab.dat >> $LOG 2>&1
ls -l comab.dat >> $LOG 2>&1
echo "***** COM FILE *****" >> $LOG
echo "-------------------------------------------------------" >> $LOG
echo " [${cabname}] " >> $LOG
cat comab.dat >> $LOG
echo "-------------------------------------------------------" >> $LOG
echo "" >> $LOG
#
# Check to make sure comab.dat file is not empty
if [ ! -s comab.dat ]; then
  echo "*** ERROR: comab.dat file is empty, halt ships processing." \
    >> $LOG
  exit 1
fi
# -------------------------
# UNPACK GRIB: if necessary
# -------------------------
# --- Look for GFS Grib files, Unpack if necessary ---
# Update the GFS files if necessary
#if [[ $ioper -eq 1 && ${2} -eq 1 ]]; then
if [ ${ioper} -eq 1 ]; then
  if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then
    #if [ $ioper -eq 5 ]; then
    # Check the operational directory for updated GFS files
    # echo "***** Skipping GRIB Unpacking *****" >> $LOG
    echo "***** GRIB Unpacking *****" >> $LOG
    echo "-------------------------------------------------------" >> $LOG
    ${sdir}"anupdatem.ksh"
    cat anupdate.log >> $LOG
    echo "-------------------------------------------------------" >> $LOG
    echo "" >> $LOG
    ls -ltrd *.bin A*PACK.DAT
    rm -f *.bin
    rm -f A*PACK.DAT
    # rm -r anupdate.log
  fi
fi
# Update the ECMWF files if necessary
#if [[ $ioper -eq 1 && ${2} -eq 2 ]]; then
if [[ $ioper -eq 1 && "${2}" = "ecm" ]]; then
  # Check the operational directory for updated ECMWF files
  echo "***** ECMWF GRIB Unpacking *****" >> $LOG
  echo "-----------------------------------------------------" >> $LOG
  ${sdir}"enupdate.ksh"
  cat enupdate.log >> $LOG
  echo "-----------------------------------------------------" >> $LOG
  ls -ltrd *.bin E*PACK.DAT
  rm -f *.bin
  rm -f E*PACK.DAT
fi
# --- SHIPS pre-processing stage: Interpolated Track ---
#
echo "***** SHIPS PRE-PROCESSING *****" >> $LOG
echo "-------------------------------------------------------" >> $LOG
echo " " >> $LOG
echo "---- creating the SHIPS input stormcard ----" >> $LOG
# --- SHIPS Input Stormcard ---
#
# Make the stormcard from the comab.dat file
rm -f stormcard.dat
rm -f istormcard.dat
export pgm=stormcard.x
prep_step
startmsg
${xdir}"stormcard.x" >> $LOG
export err=$?; err_chk;
#
echo "" >> $LOG
echo "---- stormcard contents ----" >> $LOG
cat stormcard.dat >> $LOG
echo "---- stormcard contents ----" >> $LOG
#
# manipulate date from the stormcard
rm -f sbdd.dat
export pgm=sbdd.x
prep_step
startmsg
${xdir}"sbdd.x" >> $LOG
export err=$?; err_chk;
echo "" >> $LOG
echo "---- stormcard dates ----" >> $LOG
cat sbdd.dat >> $LOG
echo "---- stormcard dates ----" >> $LOG
echo "" >> $LOG
#
# move stormcard into input file
mv -f stormcard.dat istormcard.dat
echo "---- Ocean parameters ----" >> $LOG
echo "" >> $LOG
#
# --- Get the most recent SST file
rm -f RSSTYY_XMMDD_PACK.DAT
rm -f GSSTYYYYMMDD.dat
let fdate=`tail -1 sbdd.dat | cut -c1-6`
#
# Adjust fdate for Y2K compliance (2-digit year > 50 -> 19xx, else 20xx)
if [ $fdate -gt 500000 ]
then
  let ftemp=$((19000000 + fdate))
fi
if [ $fdate -le 500000 ]
then
  let ftemp=$((20000000 + fdate))
fi
fdate=$ftemp
#
# ++ Weekly Reynolds SST (RSST) and global SST (GSST) retrieval removed
#    02/06/23 by MD -- those files are no longer available (see the
#    modification history in the file header). ++
#
# --- Reynolds daily SST Inclusion ---
#
# Get the most recent daily SST file
# FIX: was "rm -f DSSTYYYMMDD.dat" (three Y's) -- the typo meant a stale
# DSSTYYYYMMDD.dat from a previous run was never removed before the copy.
rm -f DSSTYYYYMMDD.dat
basin=`head -1 comab.dat | cut -c1-1`
cbasin="gbl"
if [ "$basin" = "A" ]
then
  cbasin="atl"
fi
if [ "$basin" = "E" ]
then
  cbasin="pac"
fi
if [ "$basin" = "C" ]
then
  cbasin="pac"
fi
# Fix for leading zero problem
let cfirst=`tail -1 sbdd.dat | cut -c1-1`
if [ $cfirst -eq 0 ]
then
  let fdate=`tail -1 sbdd.dat | cut -c2-6`
else
  let fdate=`tail -1 sbdd.dat | cut -c1-6`
fi
#
# Adjust fdate for Y2K compliance
if [ $fdate -gt 500000 ]
then
  let ftemp=$((19000000 + fdate))
fi
if [ $fdate -le 500000 ]
then
  let ftemp=$((20000000 + fdate))
fi
fdate=$ftemp
#
# Find daily SST file (latest file whose date does not exceed fdate)
for line0 in `ls $odir | grep dsst |grep $cbasin`
do
  let oyear=`echo $line0 | cut -c10-13`
  let omon1=`echo $line0 | cut -c14-14`
  let omon2=`echo $line0 | cut -c15-15`
  let oday1=`echo $line0 | cut -c16-16`
  let oday2=`echo $line0 | cut -c17-17`
  let odate=$((10000*oyear + 1000*omon1 + 100*omon2 + 10*oday1 + oday2))
  #
  if [ $fdate -ge $odate ]
  #replace previous line by this to use DSST that is 1 day old
  #if [[ $fdate -ge $odate ]] && [[ $((fdate - odate)) -ge 1 ]];
  then
    ofile=$odir$line0
  fi
done
#
cp $ofile DSSTYYYYMMDD.dat
# FIX: these two lines previously appended to "ships_run.log" instead of
# $LOG, so the selected SST file never appeared in the run log.
echo "Most recent daily SST file: "$ofile >> $LOG
echo " " >> $LOG
#
# --- Upper Ocean Heat Content Inclusion (optional) ---
if [ $iohc -eq 1 ]
then
  # Get the most recent OHC file
  rm -f OHCYYYYMMDD.DAT
  basin=`head -1 comab.dat | cut -c1-1`
  cbasin="gbl"
  if [ "$basin" = "A" ]
  then
    cbasin="atl"
  fi
  if [ "$basin" = "E" ]
  then
    cbasin="pac"
  fi
  if [ "$basin" = "C" ]
  then
    cbasin="pac"
  fi
  # Fix for leading zero problem
  let cfirst=`tail -1 sbdd.dat | cut -c1-1`
  if [ $cfirst -eq 0 ]
  then
    let fdate=`tail -1 sbdd.dat | cut -c2-6`
  else
    let fdate=`tail -1 sbdd.dat | cut -c1-6`
  fi
  # Adjust fdate for Y2K compliance
  if [ $fdate -gt 500000 ]
  then
    let ftemp=$((19000000 + fdate))
  fi
  if [ $fdate -le 500000 ]
  then
    let ftemp=$((20000000 + fdate))
  fi
  fdate=$ftemp
  #
  for line0 in `ls $ddir | grep eohc_ | grep $cbasin`
  do
    let ohyear=`echo $line0 | cut -c10-13`
    let ohmon1=`echo $line0 | cut -c14-14`
    let ohmon2=`echo $line0 | cut -c15-15`
    let ohday1=`echo $line0 | cut -c16-16`
    let ohday2=`echo $line0 | cut -c17-17`
    let ohdate=$((10000*ohyear + 1000*ohmon1 + 100*ohmon2 + 10*ohday1 + ohday2))
    #
    # use next line to select the most recent EOHC file
    if [ $fdate -ge $ohdate ]
    # use next line to select the EOHC file that is at least one day old
    # if [[ $fdate -ge $ohdate ]] && [[ $((fdate - ohdate)) -ge 1 ]];
    then
      ohfile=$ddir$line0
    fi
  done
  #
  echo "---- most-recent Upper Ocean Heat Content file ----\n"${ohfile} >> $LOG
  echo "" >> $LOG
  cp -f $ohfile OHCYYYYMMDD.DAT
  #
fi
# --- GFS/ECMWF Packed File Inclusion ---
#
# Get the most recent GFS/ECMWF files (packed ASCII format)
#
ifound=0
for line0 in `cat sbdd.dat`
do
  ayear=`echo $line0 | cut -c1-2`
  amon1=`echo $line0 | cut -c3-3`
  amon2=`echo $line0 | cut -c4-4`
  aday1=`echo $line0 | cut -c5-5`
  aday2=`echo $line0 | cut -c6-6`
  atime=`echo $line0 | cut -c7-8`
  #
  # The following section avoids computation with amon and aday
  # with leading zeros such as 08 and 09
  if [ $amon1 -eq 0 ]
  then
    amon=$amon2
  fi
  #
  if [ $amon1 -ge 1 ]
  then
    amon=$((10*$amon1 + $amon2))
  fi
  #
  if [ $aday1 -eq 0 ]
  then
    aday=$aday2
  fi
  #
  if [ $aday1 -ge 1 ]
  then
    aday=$((10*$aday1 + $aday2))
  fi
  #
  # end special computations
  #
  # NOTE(review): adding 5 to the tens digit of the day appears to encode
  # the 12Z/18Z cycles in the pack-file name (X=00/12Z, Y=06/18Z below)
  # -- confirm against the pack-file naming convention.
  if [ $atime -gt 11 ]
  then
    aday1=$(($aday1 + 5))
  fi
  #
  axy="X"
  #
  if [ $atime -eq 06 ]
  then
    axy="Y"
  fi
  #
  if [ $atime -eq 18 ]
  then
    axy="Y"
  fi
  #
  #if [ ${2} -eq 1 ]; then
  if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then
    dfile="A072"$ayear"_"$axy$amon1$amon2$aday1$aday2"_PACK.DAT"
  fi
  #if [ ${2} -eq 2 ]; then
  if [ "${2}" = "ecm" ]; then
    dfile="E072"$ayear"_"$axy$amon1$amon2$aday1$aday2"_PACK.DAT"
  fi
  #
  yymmdd=`echo $line0 | cut -c1-6`
  # export GFS_PACK_DIR=/com/nhc/${envir}/storm-data/gfs_PACK.20${yymmdd}/
  ## echo "redefined pdir to ${GFS_PACK_DIR} dir "
  ## pdir=${GFS_PACK_DIR}
  if [ -s $pdir$dfile ]
  then
    ifound=1
    ayears=$ayear
    axys=$axy
    amons=$amon1$amon2
    adays=$aday1$aday2
  fi
done
echo "---- Model parameters ----" >> $LOG
echo "" >> $LOG
#
if [ $ifound -eq 1 ]
then
  prefix=$ayears"_"$axys$amons$adays"_PACK.DAT"
  icount=0
  #if [ ${2} -eq 1 ]; then
  if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then
    echo "---- GFS Input (lsdiag) ----" >> $LOG
    for line0 in `ls $pdir | grep $prefix | grep A`
    do
      icount=$((icount+1))
      jcount=$((icount+1000))
      qnum=`echo $jcount | cut -c3-4`
      echo "F"$qnum" = "$line0 >> $LOG
      cp $pdir$line0 .
    done
    echo "---- GFS Input (lsdiag) ----" >> $LOG
  fi
  #if [ ${2} -eq 2 ]; then
  if [ "${2}" = "ecm" ]; then
    echo "---- ECMWF Input (lsdiag) ----">> $LOG
    #Changed to ECMWF pack filename format - E*_PACK.DAT
    #Copy over as GFS pack filename format - A*_PACK.DAT
    for line0 in `ls $pdir | grep $prefix | grep E`
    do
      icount=$((icount+1))
      jcount=$((icount+1000))
      qnum=`echo $jcount | cut -c3-4`
      epackfile=`basename $line0`
      packname=`echo $epackfile | cut -c2-21`
      apackfile="A"$packname
      echo "F"$qnum" = "$epackfile $apackfile >> $LOG
      cp $pdir$epackfile $apackfile
      cp $pdir$epackfile .
    done
    echo "---- ECMWF Input (lsdiag) ----">> $LOG
  fi
  #
fi
# --- Run the TAB model
echo " " >> $LOG
echo "---- Run the TAB model for back-up track forecast for SHIPS run ----" >> $LOG
echo " " >> $LOG
rm -f tab.log
rm -f tab.dat
rm -f tab.tst
rm -f tab.com
cp comab.dat tab.com
export pgm=tab.x
prep_step
startmsg
${xdir}"tab.x"
export err=$?; err_chk;
# --- Get track forecast (first choice OFCL or OFCI, 2nd choice BAMM or TABM models)
#
echo "---- getting a track forecast ----" >> $LOG
echo "" >> $LOG
#
# Create the itrack.dat file (using TABM for initial)
mdl="TABM"
rm -f ntrack.dat
grep TABM tab.dat > ntrack.dat
#
# Begin track mods ++
# Check to see if $mdl track can be replaced by OFCI
rm -f ntrack.com
cat $cabname > ntrack.com
#
export pgm=ntrack.x
prep_step
startmsg
${xdir}"ntrack.x" >> $LOG
export err=$?; err_chk;
#
if [ -s ntrack.dat ]
then
  # Use the model id from the first ntrack record (e.g. OFCI)
  mdl=`cat ntrack.dat | head -1 | sed 's/ //g' | cut -d',' -f5`
  cp -f ntrack.dat itrack.dat
fi
#
# End track mods ++
#
# Check to make sure itrack.dat file is not empty
if [ ! -s itrack.dat ]
then
  echo "*** ERROR ***: itrack.dat file is empty, halt ships processing." \
    >> $LOG
  exit 1
fi
#
echo "---- itrack.dat ---- using ${mdl}" >> $LOG
cat itrack.dat >> $LOG
echo "---- itrack.dat ---- " >> $LOG
echo "" >> $LOG
# --- Create Input for large-Scale Diagnostics ---
#
# Make the input file for the diagnostic program
echo "" >> $LOG
echo "---- Creating input for lsdiag ----" >> $LOG
echo "" >> $LOG
rm -f lsdiag.inp
rm -f INFILE.in
export pgm=ilsin.x
prep_step
startmsg
${xdir}"ilsin.x" >> $LOG
export err=$?; err_chk;
# Fold the EOHC data into the lsdiag input (eohcadd.x reads
# lsdiag_inp.dat and writes lsdiag_out.dat)
export pgm=eohcadd.x
prep_step
startmsg
cp -f lsdiag.inp lsdiag_inp.dat
rm -f lsdiag_out.dat
${xdir}"eohcadd.x" >> $LOG
if [ -s lsdiag_out.dat ]
then
  cp -f lsdiag_out.dat lsdiag.inp
fi
export err=$?; err_chk;
#--- IR Profiles --- (This section was re-written for WCOSS)
echo "---- Satellite Input ----" >> $LOG
cat INFILE.in >> $LOG
echo "---- Satellite Input ----" >> $LOG
echo "" >> $LOG
# --- Dump McIDAS AREA files to ASCII files
rm -f irdumpc.inp
head -1 INFILE.in > irdumpc.inp
rm -f irtemp1.dat
rm -f irtemp2.dat
rm -f irtemp3.dat
irf1=`head -2 INFILE.in | tail -1`
irf2=`head -3 INFILE.in | tail -1`
irf3=`head -4 INFILE.in | tail -1`
#Fix for non-zero GOES minute: if the exact filename is missing, take the
#first file matching any last digit of the minute
if [ ! -f ${iidir}${irf1} ]
then
  irf1=`ls ${iidir}${irf1%?}[0-9] | head -1 | tail -c29`
fi
if [ ! -f ${iidir}${irf2} ]
then
  irf2=`ls ${iidir}${irf2%?}[0-9] | head -1 | tail -c29`
fi
if [ ! -f ${iidir}${irf3} ]
then
  irf3=`ls ${iidir}${irf3%?}[0-9] | head -1 | tail -c29`
fi
echo "Get t=   0 hr IR file from "$iidir$irf1 >> $LOG
echo "Get t=-1.5 hr IR file from "$iidir$irf2 >> $LOG
echo "Get t=-3   hr IR file from "$iidir$irf3 >> $LOG
echo " " >> $LOG
cp $iidir$irf1 irtemp1.dat
cp $iidir$irf2 irtemp2.dat
cp $iidir$irf3 irtemp3.dat
rm -f *IRPC0.dat
export pgm=irdumpc_rect.x
prep_step
startmsg
$xdir"irdumpc_rect.x"
export err=$?; err_chk;
#Use if statements below only if GOESR & legacy
#are used simultaneously
#
#if [[ "$irf1" == GW* ]]; then
#  export pgm=irdumpc.x
#  prep_step
#  startmsg
#  $xdir"irdumpc.x"
#  export err=$?; err_chk;
#fi
#if [[ "$irf1" == GE* ]]; then
#  export pgm=irdumpc_rect.x
#  prep_step
#  startmsg
#  $xdir"irdumpc_rect.x"
#  export err=$?; err_chk;
#fi
export pgm=mkirprof.x
prep_step
startmsg
$xdir"mkirprof.x"
#export err=$?; err_chk;
rm -f INFILE.in
datt=`head -1 istormcard.dat | cut -c1-6`
timt=`head -2 istormcard.dat | tail -1 | cut -c1-2`
atnm=`tail -1 istormcard.dat | cut -c1-6`
# FIX: was "[ ioper -eq 0 ]" (missing $) -- that only works through ksh's
# implicit arithmetic variable lookup and breaks under other shells.
if [ $ioper -eq 0 ]; then
  # Copy pre-built IR profile data from idir (test mode)
  rm -f IRRP1.dat
  rm -f IRRP1.inf
  rm -f IRRP2.dat
  rm -f IRRP2.inf
  rm -f IRRP3.dat
  rm -f IRRP3.inf
  cp -f $idir$datt$timt$atnm"_IRRP1.dat" IRRP1.dat
  cp -f $idir$datt$timt$atnm"_IRRP1.inf" IRRP1.inf
  cp -f $idir$datt$timt$atnm"_IRRP2.dat" IRRP2.dat
  cp -f $idir$datt$timt$atnm"_IRRP2.inf" IRRP2.inf
  cp -f $idir$datt$timt$atnm"_IRRP3.dat" IRRP3.dat
  cp -f $idir$datt$timt$atnm"_IRRP3.inf" IRRP3.inf
fi
# --- IR Predictor Inclusion ---
#
# Add the IR predictors
rm -f irpred.dat
cp -f lsdiag.inp irpred.inp
# compute the predictors
export pgm=irpred.x
prep_step
startmsg
${xdir}"irpred.x" >> $LOG
export err=$?; err_chk;
# move IR data into lsdiag input (if available)
if [ -s irpred.dat ]; then
  mv -f irpred.dat lsdiag.inp
fi
# Add the IRPC predictors if they are available: splice the IRPC records
# in just before the last line of lsdiag.inp
fnirpc=$datt$timt$atnm"_IRPC0.dat"
if [ -s $fnirpc ]
then
  # FIX: was `wc -l lsdiag.inp | cut -c1-2`, which truncates counts of
  # 100+ lines (and depends on wc's column layout); read from stdin so
  # wc emits only the count.
  lslen=`wc -l < lsdiag.inp`
  lslenm=$(( lslen - 1 ))
  head -$lslenm lsdiag.inp > irpc.inp
  cat $fnirpc >> irpc.inp
  tail -1 lsdiag.inp >> irpc.inp
  mv irpc.inp lsdiag.inp
fi
#
echo "---- Final Diagnostic Input ----" >> $LOG
cat lsdiag.inp >> $LOG
echo "---- Final Diagnostic Input ----" >> $LOG
echo "" >> $LOG
# -----------------------------------------------------------
# BEGIN SHIPS MODEL RUNS : large-scale diagnostics and model
# -----------------------------------------------------------
echo "***** SHIPS MODEL RUN *****" >> $LOG
echo "-------------------------------------------------------" >> $LOG
echo "" >> $LOG
# --- Compute large-Scale Diagnostics ---
#
# Run the diagnostic program
rm -f lsdiag.dat
rm -f lsdiag.log
echo "---- Diagnostic Log ----" >> $LOG
# Run lsdiag
export pgm=lsdiag.x
prep_step
startmsg
${xdir}"lsdiag.x" >> $LOG
export err=$?; err_chk;
cat lsdiag.log >> $LOG
echo "---- Diagnostic Log ----" >> $LOG
echo "" >> $LOG
#
echo "---- Final Diagnostic Output ----" >> $LOG
cat lsdiag.dat >> $LOG
echo "---- Final Diagnostic Output ----" >> $LOG
echo "" >> $LOG
# --- SHIPS runtime model (apply preditors to current data) ---
# remove any lingering output
rm -f ships.dat
rm -f ships.edk
rm -f ships.txt
rm -f ships.log
echo "---- SHIPS runtime ----" >> $LOG
export pgm=iships.x
prep_step
startmsg
${xdir}"iships.x" >> $LOG 2>&1
#export err=$?; err_chk; ## commented out by SPA to let other model to run ....
cat ships.log >> $LOG
echo "---- SHIPS runtime ----" >> $LOG
echo "" >> $LOG
# -----------------------------------------------------------
# RAPID INTENSIFICATION AID
# -----------------------------------------------------------
#
# Note: The RI aid is now run directly as part of the main iships.f
# routine, so the processing below is turned off.
#echo "---- Calling RI routine ----" >> $LOG
#echo "${xdir}riaid.x ${stormid} ${currdtg} ships.dat rivalues.dat" >> $LOG
#echo "" >> $LOG
#cp $ydir"rivalues.dat" .
#cat rivalues.dat >> $LOG
#if [ -s rivalues.dat ]; then
#${xdir}riaid.x ${stormid} ${currdtg} ships.dat rivalues.dat >> $LOG
#echo "" >> $LOG
#fi
#if [ -s rapid.dat ]; then
#cat rapid.dat >> ships.dat
#echo "---- RI Output ----" >> $LOG
#cat rapid.dat >> $LOG
#echo "--------------------" >> $LOG
#echo "" >> $LOG
#fi
# -----------------------------------------------------------
# POST SHIPS: test file creation, A-deck creation and Cleanup
# -----------------------------------------------------------
# GFS version
#if [ ${2} -eq 1 ]; then
if [ "${2}" = "gfs" ] || [ "${2}" = "fv3" ]; then
  # --- Save provisional TAB track forecast ---
  if [ -s tab.dat ]; then
    # Add TAB forecasts to ships.dat file
    cat tab.dat >> ships.dat
  fi
  # --- SHIPS text file result ---
  #
  echo "---- SHIPS text output ----" >> $LOG
  shipstxtheader=" * GFS version *"
  echo "${shipstxtheader}" > ships.temptxt
  cat ships.txt >> ships.temptxt
  mv ships.temptxt ships.txt
  cat ships.txt >> $LOG
  echo "---- SHIPS text output ----" >> $LOG
  echo "" >> $LOG
  # --- SHIPS ATCF Deck creation ---
  #
  echo "---- SHIPS ATCF output ----" >> $LOG
  cat ships.dat >> $LOG
  echo "---- SHIPS ATCF output ----" >> $LOG
  echo "" >> $LOG
  ls -ltrd ships.dat
  if [[ -s ships.dat ]]; then
    echo " ships.dat is generated ..."
    # cp -p ships.dat ${COMOUT}/.
  else
    # msg="=== Warning: ships.dat not generated from $pgm ==="
    # postmsg "$jlogfile" "$msg"
    echo "=== Warning: ships.dat not generated from iships.x==="
    cat $LOG
  fi
  if [[ -s ships.edk ]]; then
    echo " ships.edk is generated ..."
    # cp -p ships.edk ${COMOUT}/.
  fi
  #
  # Copy ships text file to the appropriate directory
  datm=`head -1 ships.tst | cut -c7-14`
  atnm=`head -1 ships.tst | cut -c170-175`
  #atnm=`head -1 ships.tst | cut -c126-131`
  cp -f ships.txt ${tdir}$datm$atnm"_ships.txt"
  cp -f ships.dat ${tdir}$datm$atnm"_ships.dat"
  cp -f ships.edk ${tdir}$datm$atnm"_ships.edk"
  # (FIX: lsdiag.dat was previously copied twice to the same destination;
  # the duplicate was removed.)
  cp -f lsdiag.dat ${tdir}$datm$atnm"_lsdiag.dat"
  #
  # Store the IR profiles
  # FIX: was "[ ioper -eq 1 ]" (missing $) -- that only works through
  # ksh's implicit arithmetic variable lookup and breaks under other shells.
  if [ $ioper -eq 1 ]; then
    if [ -s IRRP1.dat ]; then
      cp -f IRRP1.dat $idir$datm$atnm"_IRRP1.dat"
      cp -f IRRP1.inf $idir$datm$atnm"_IRRP1.inf"
    fi
    if [ -s IRRP2.dat ]; then
      cp -f IRRP2.dat $idir$datm$atnm"_IRRP2.dat"
      cp -f IRRP2.inf $idir$datm$atnm"_IRRP2.inf"
    fi
    if [ -s IRRP3.dat ]; then
      cp -f IRRP3.dat $idir$datm$atnm"_IRRP3.dat"
      cp -f IRRP3.inf $idir$datm$atnm"_IRRP3.inf"
    fi
    if [ -s $fnirpc ]; then
      cp -f $fnirpc $idir$fnirpc
    fi
  fi
fi
#if [ ${2} -eq 2 ]; then
if [ "${2}" = "ecm" ]; then
  rm -f eships.dat
  rm -f eships.txt
  rm -f elsdiag.dat
  # Copy files over to ECMWF prefix, change ids and add disclaimer
  mv lsdiag.dat elsdiag.dat
  # Add disclaimer to beginning of ships.txt file to indicate ECMWF SHIPS
  eshipstxtheader=" * ECMWF version * "
  echo "${eshipstxtheader}" > eships.txt
  cat ships.txt >> eships.txt
  rm -f ships.txt
  # --- Save provisional TAB track forecast ---
  if [ -s tab.dat ]; then
    # Add TAB forecasts to ships.dat file
    cat tab.dat >> ships.dat
  fi
  # Copy ships.dat file after renaming for ECMWF versions
  # SHIP --> SHPE; DSHP --> DSPE; LGEM --> LGME
  # TABD --> TBDE; TABM --> TBME; TABS --> TBSE
  cat ships.dat | sed 's/SHIP/SHPE/g' | sed 's/DSHP/DSPE/g' | sed 's/LGEM/LGME/g' | sed 's/TABD/TBDE/g' | sed 's/TABM/TBME/g' | sed 's/TABS/TBSE/g' > eships.dat
  rm -f ships.dat
  # --- SHIPS text file result ---
  #
  echo "---- SHIPS text output ----" >> $LOG
  # FIX: was "cat eships.txt" with no redirection -- the text product was
  # dumped to stdout instead of being captured in the run log like the
  # GFS branch does.
  cat eships.txt >> $LOG
  echo "---- SHIPS text output ----" >> $LOG
  echo "" >> $LOG
  # --- SHIPS ATCF Deck creation ---
  #
  echo "---- SHIPS ATCF output ----" >> $LOG
  cat eships.dat >> $LOG
  echo "---- SHIPS ATCF output ----" >> $LOG
  echo "" >> $LOG
  ls -ltrd eships.dat
  if [[ -s eships.dat ]]; then
    echo " eships.dat is generated ..."
    # cp -p eships.dat ${COMOUT}/.
  else
    # msg="=== Warning: eships.dat not generated from $pgm ==="
    # postmsg "$jlogfile" "$msg"
    echo "=== Warning: eships.dat not generated from iships.x==="
    cat $LOG
  fi
  # Copy ships.edk file after renaming for ECMWF versions
  # RIOL --> EIOL; RIOD --> EIOD; RIOB --> EIOB; RIOC --> EIOC
  cat ships.edk | sed 's/RIOL/EIOL/g' | sed 's/RIOD/EIOD/g' | sed 's/RIOB/EIOB/g' | sed 's/RIOC/EIOC/g' > eships.edk
  rm -f ships.edk
  if [[ -s eships.edk ]]; then
    echo " eships.edk is generated ..."
    # cp -p eships.edk ${COMOUT}/.
  fi
  # Copy ships text file to the appropriate directory
  datm=`head -1 ships.tst | cut -c7-14`
  atnm=`head -1 ships.tst | cut -c170-175`
  #atnm=`head -1 ships.tst | cut -c126-131`
  cp -f eships.txt $tdir"EC_"$datm$atnm"_ships.txt"
  cp -f elsdiag.dat $tdir"EC_"$datm$atnm"_lsdiag.dat"
  cp -f eships.dat $tdir"EC_"$datm$atnm"_ships.dat"
  cp -f eships.edk $tdir"EC_"$datm$atnm"_ships.edk"
fi
rm -f *IRRP*
rm -f *IRPC0.dat
# --- Perform Cleanup ---
# Clean up after ships model run
rm -f SBF*
rm -f A*PACK.DAT
rm -f E*PACK.DAT
rm -f RSSTYY*
rm -f GSSTYY*
rm -f DSSTYY*
rm -f OHCYYYY*
rm -f gfs_cases
rm -f gfs_casest
rm -f gfs_indate
rm -f ilsin.log
#rm -f lsdiag.dat
rm -f lsdiag.log
rm -f irpred.inp
rm -f lsdiag.inp
#rm -f lsdiag_out.dat
#rm -f lsdiag_inp.dat
rm -f istormcard.dat
rm -f itrack.dat
#rm -f comab.dat
rm -f sbdd.dat
rm -f ships.log
rm -f ships.txt
rm -f unpackg.log
rm -f ntrack.com
rm -f ntrack.dat
rm -f INFILE.in
rm -f rivalues.dat
rm -f rapid.dat
rm -f packfile
rm -f irtemp1.dat
rm -f irtemp2.dat
rm -f irtemp3.dat
rm -f irdumpc.inp
rm -f irdump1.dat
rm -f irdump2.dat
rm -f irdump3.dat
rm -f elsdiag.dat
rm -f eships.txt
echo "-------------------------------------------------------" >> $LOG
#if [ ${2} -eq 1 ]; then
if [ "${2}" = "gfs" ]; then
  echo " ***** GFS SHIPS RUN COMPLETED *****" >> $LOG
fi
if [ "${2}" = "fv3" ]; then
  echo " ***** GFS-FV3 SHIPS RUN COMPLETED *****" >> $LOG
fi
#if [ ${2} -eq 2 ]; then
if [ "${2}" = "ecm" ]; then
  echo " ***** ECMWF SHIPS RUN COMPLETED *****" >> $LOG
fi
echo "-------------------------------------------------------" >> $LOG
echo "end time: "`date +"%Y/%m/%d %T %Z doy=(%y%j)"` >> $LOG
echo "" >> $LOG
#cp -p $LOG $COMOUT/.
# Move the run log into the per-storm log directory
cp -p $LOG $ldir
rm $LOG
# compress $LOG
#gzip $LOG
# --- End of Model Run ---
#
# exit with success stat
exit 0
#