#PBS -N hwrf%STORMNUM%_merge_%CYC%
#PBS -j oe
#PBS -S /bin/bash
#PBS -q %QUEUE%
#PBS -A %PROJ%-%PROJENVIR%
#PBS -l walltime=00:15:00
#PBS -l select=1:mpiprocs=1:ompthreads=24:ncpus=24
#PBS -l debug=true

export NODES=1
export TOTAL_TASKS=24

model=hwrf
%include <head.h>
%include <envir-p1.h>

export cyc="%CYC%"
export storm_num="%STORMNUM%"

# versions file for hwrf sets $model_ver and $code_ver
module use ${HOMEhwrf}/modulefiles/
module load HWRF/runensda
module list

${HOMEhwrf}/jobs/JHWRF_MERGE

%include <tail.h>
%manual
TASK merge

NOTE: The forecast job cannot run unless either the gsi/merge job or the
init/relocate_gfs job has completed.

PURPOSE: Merges the output of gsi_d02 and gsi_d03 back into the input
fields used by the atmospheric component of the forecast.

NOTE: This job is skipped if gsi_d02 or gsi_d03 fails. It is also skipped
if the launch or bufrprep jobs request that the GSI be skipped.

TROUBLESHOOTING: All errors seen in the 2016 HWRF pre-implementation
testing were due to node crashes, filesystem problems, or quota violations
that caused data corruption. Check for node and filesystem problems first.
If all hardware is running well, the problem may be a genuine code or
script issue. This job is made up of many small Fortran programs with
limited error checking. If one fails, the root cause was usually in an
earlier program, or possibly in an earlier job. Identifying the source of
the trouble requires expert help; please call on-call EMC staff for
assistance.

MANUAL BYPASS OPTION: It is possible to manually bypass failures of this
job by editing $COMOUT/storm$num.conf and setting run_gsi=no, then
clearing the Gsi event in the launch job. The forecast should then start
without data assimilation. Example commands are sketched in the manual
section below.
%end
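%manual
EXAMPLE BYPASS COMMANDS: a minimal sketch of the bypass described above,
assuming storm slot 1, that a run_gsi line already exists in the conf
file, and an example ecFlow node path (/prod/hwrf/storm1/launch);
substitute the actual storm number and suite path for your run.

  # 1. Turn off data assimilation for this storm and cycle:
  sed -i 's/^run_gsi *=.*/run_gsi=no/' $COMOUT/storm1.conf

  # 2. Clear the Gsi event on the launch task so the forecast trigger
  #    no longer waits on the merge job:
  ecflow_client --alter=change event Gsi clear /prod/hwrf/storm1/launch
%end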