## This is the system-specific configuration file for the WCOSS2 Cray systems.

[prep_hybrid]
threads=24

[wrfexe]
minsize=10000
minage=20

[enswrf]
minsize=10000
minage=20

[runwrf]
minsize=10000
minage=30

[config]
## Project whose disk area should be used.
disk_project=hur
tape_project=emc-hwrf

## Specify input sources for HISTORY mode.
#
# Specifies input sources for use by hwrf.input.InputSources. In a
# HISTORY mode run, this will be used in the scripts.exhwrf_input job
# to pull data.
input_sources=wcosscray_sources_{GFSVER}

## Specify the hwrf.input.DataCatalog for FORECAST mode runs.
fcst_catalog=wcoss_fcst_{GFSVER}

## CPU account name for submitting jobs to the batch system.
cpu_account=HWRF-DEV

## Archive path
archive=hpss:/NCEPDEV/{tape_project}/5year/{ENV[USER]}/{SUBEXPT}/{out_prefix}.tar

[wcoss_fcst_PROD2019]
inputroot=/gpfs/hps3/emc/hwrf/noscrub/input/ ;; Input data root
rtofs=/gpfs/dell1/nco/ops/com/rtofs/prod/rtofs.{aYMD}/ ;; RTOFS directory
rtofsstage={WORKhwrf}/hycominit/RTOFSDIR
ww3=/gpfs/dell1/nco/ops/com/wave/prod/multi_1.{aYMD}/ ;; WAVE directory
dcom=/gpfs/dell1/nco/ops/dcom/prod ;; DCOM directory
gfs=/gpfs/dell1/nco/ops/com/gfs/prod/gfs.{aYMD}/{aHH} ;; GFS directory
gdas1=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.{aYMD}/{aHH} ;; GDAS directory
gefs=/gpfs/dell2/nco/ops/com/gens/prod/gefs.{aYMD}/{aHH}/ ;; GEFS directory
enkf=/gpfs/dell1/nco/ops/com/gfs/prod/enkfgdas.{aYMD}/{aHH}/ ;; GFS ENKF directory
messages=/gpfs/hps/nco/ops/com/hur/prod/inphwrf/ ;; hurricane message file directory
syndatdir=/gpfs/dell1/nco/ops/com/gfs/prod/syndat/ ;; syndat tcvitals directory
loopdata={inputroot}/LOOP-CURRENT/ ;; loop current directory
hd_obs={inputroot}/RECON/gdas.{aYMD}/ ;; hd obs directory
nexrad_obs={inputroot}/NEXRAD/nexrad.{aYMDH}/ ;; nexrad_obs data directory
tdr={inputroot}/TDR/{aYYYY}/{aYMDH}/{vit[stnum]:02d}{vit[basin1lc]}/ ;; TDR data directory
@inc=gfs2019_naming,para_loop_naming,prod_gefs_naming,rtofs_naming,ww3_naming

[wcoss_fcst_PROD2020]
inputroot=/lfs/h2/emc/hur/noscrub/input ;; Input data root
rtofs=/lfs/h1/ops/canned/com/rtofs/v2.2/rtofs.{aYMD} ;; RTOFS directory
rtofsstage={WORKhwrf}/hycominit/RTOFSDIR
## NOTE: the next two entries share the gfswave key; under standard conf
## parsing the second assignment overrides the first, so the station
## entry is shadowed and these likely need distinct key names.
gfswave=/lfs/h1/ops/canned/com/gfs/v16.2/gfs.{aYMD}/{aHH}/wave/station ;; WAVE station directory
gfswave=/lfs/h1/ops/canned/com/gfs/v16.2/gfs.{aYMD}/{aHH}/wave/restart ;; WAVE restart directory
dcom=/lfs/h1/ops/prod/dcom ;; DCOM directory
gfs=/lfs/h1/ops/canned/com/gfs/v16.2/gfs.{aYMD}/{aHH}/atmos ;; GFS directory
gdas1=/lfs/h1/ops/canned/com/gfs/v16.2/gdas.{aYMD}/{aHH}/atmos ;; GDAS directory
gefs=/lfs/h1/ops/canned/com/gfs/v16.2/gefs.{aYMD}/{aHH}/ ;; GEFS directory
enkf=/lfs/h1/ops/canned/com/gfs/v16.2/enkfgdas.{aYMD}/{aHH}/atmos ;; GFS ENKF directory
messages=/lfs/h1/ops/canned/com/hur/inphwrf ;; hurricane message file directory
syndatdir=/lfs/h1/ops/canned/com/gfs/v16.2/syndat ;; syndat tcvitals directory
loopdata={inputroot}/LOOP-CURRENT/ ;; loop current directory
hd_obs={inputroot}/RECON/gdas.{aYMD}/ ;; hd obs directory
nexrad_obs={inputroot}/NEXRAD/nexrad.{aYMDH}/ ;; nexrad_obs data directory
tdr={inputroot}/TDR/{aYYYY}/{aYMDH}/{vit[stnum]:02d}{vit[basin1lc]}/ ;; TDR data directory
tempdropso={inputroot}/TEMPDROP/{aYMDH}/ ;; TEMPDROP sonde data directory
@inc=gfs2020_naming,para_loop_naming,prod_gefs_naming,rtofs_naming,ww3_naming

[hwrfdata]
inputroot=/lfs/h2/emc/{disk_project}/noscrub/{ENV[USER]}/hwrfdata_{GFSVER}
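## Worked example of the {...} substitution in the catalog sections
## above, assuming a hypothetical 2022092606 analysis cycle (so
## {aYMD}=20220926 and {aHH}=06); illustrative only, not output of the
## real hwrf.config machinery:
#
#   [wcoss_fcst_PROD2020] gfs
#     -> /lfs/h1/ops/canned/com/gfs/v16.2/gfs.20220926/06/atmos
#   [wcoss_fcst_PROD2020] loopdata
#     -> /lfs/h2/emc/hur/noscrub/input/LOOP-CURRENT/
#
# {ENV[USER]} expands to the USER environment variable, and
# {vit[stnum]:02d} zero-pads the storm number from the tcvitals
# (e.g. 09 for a hypothetical storm 09L).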
[dir]
utilexec=/apps/ops/prod/libs/intel/19.1.3.304/grib_util/1.2.3/bin ;; /nwprod/util/exec directory
utilscript=/ ;; unused /nwprod/util/ush directory (must exist)

## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/lfs/h2/emc/{disk_project}/noscrub/{ENV[USER]}/trak

## Scrubbed directory for large work files. Make sure you edit this.
#
# This is the area where HWRF will actually run. Due to the design of
# the WCOSS2 compute and disk, it is critical that you use one of two
# areas:
# - /lfs/h2/emc/ptmp
# - /lfs/h2/emc/stmp
#
CDSCRUB=/lfs/h2/emc/ptmp/{ENV[USER]}

## Save directory. Make sure you edit this.
CDSAVE=/lfs/h2/emc/{disk_project}/noscrub/{ENV[USER]}/save

## Syndat directory for finding which cycles to run
syndat=/lfs/h2/emc/hur/noscrub/input/SYNDAT-PLUS

## Used when parsing hwrf_holdvars.txt to make storm*.holdvars.txt in COM
[holdvars]
WHERE_AM_I=wcoss2cray ;; Which cluster? (For setting up environment.)
WHICH_JET=none ;; Which part of Jet are we on? None; we are not on Jet.

[wrfexe]
nio_groups=2 ;; Number of WRF I/O server groups per domain
nio_tasks_per_group=8,8,8 ;; Number of I/O servers per group (one entry per domain)
poll_servers=yes ;; Turn on server polling if quilt servers are used
nproc_x=6 ;; WRF processor count in X direction (-1 = automatic)
nproc_y=8 ;; WRF processor count in Y direction (-1 = automatic)

[runwrf]
nio_groups=1 ;; Number of WRF I/O server groups per domain
nio_tasks_per_group=4,4,4 ;; Number of I/O servers per group (one entry per domain)
poll_servers=yes ;; Turn on server polling if quilt servers are used
nproc_x=24 ;; WRF processor count in X direction (-1 = automatic)
nproc_y=40 ;; WRF processor count in Y direction (-1 = automatic)
wrf_compute_ranks=960 ;; Number of WRF compute ranks (not I/O servers)
ww3_ranks=240 ;; Number of WAVEWATCH III ranks
wm3c_ranks=2 ;; Number of coupler ranks
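## Worked rank accounting for the [runwrf] settings above, assuming the
## usual WRF quilting arithmetic (verify against the job cards before
## relying on it):
#
#   compute ranks:    nproc_x * nproc_y = 24 * 40 = 960
#                     (must equal wrf_compute_ranks)
#   I/O server ranks: nio_groups * sum(nio_tasks_per_group)
#                     = 1 * (4 + 4 + 4) = 12
#   coupled total:    960 + 12 + 240 (ww3_ranks) + 2 (wm3c_ranks) = 1214
#
# The same arithmetic for [wrfexe] gives 6 * 8 = 48 compute ranks plus
# 2 * (8 + 8 + 8) = 48 I/O server ranks.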