#==============================================
# run.ver
# 05/10/2022 NWM v3.0
#==============================================

#----------------------------------------------
# Set NWM version number
#----------------------------------------------
export nwm_ver=v3.0.13
# Derive the short version strings (e.g. "v3.0" and "3.0") from nwm_ver.
# Use [0-9]+ for the major digit so double-digit majors (v10.x) also match;
# for v3.0.13 the first match is still v3.0 / 3.0, identical to before.
export nwmVer=$(echo "${nwm_ver}" | grep -Eo 'v[0-9]+\.[0-9]+')
export nwmVer1=$(echo "${nwm_ver}" | grep -Eo '[0-9]+\.[0-9]+')
export MPMD=on

#----------------------------------------------
# WCOSS softwares versions
#----------------------------------------------
export envvar_ver=1.0
export prod_envir_ver=2.0.6
export prod_util_ver=2.0.14
export PrgEnv_intel_ver=8.2.0
export intel_ver=19.1.3.304
export gcc_ver=11.2.0
export cray_mpich_ver=8.1.12
export craype_ver=2.7.13
export pals_ver=1.1.3
export cfp_ver=2.0.4
export netcdf_ver=4.7.4
export python_ver=3.8.6
export grib2_ver=2.0.8
export hdf5_ver=1.10.6
export iobuf_ver=2.0.10
export nco_ver=4.9.7
export gsl_ver=2.7
export udunits_ver=2.2.28
# Previous module versions, kept for reference:
#export prod_envir_ver=2.0.6
#export prod_util_ver=2.0.13

#----------------------------------------------
# Input sources version
#----------------------------------------------
export cfs_ver=v2.3
export gfs_ver=v16.3
export nam_ver=v4.2
export hiresw_ver=v8.1
export rap_ver=v5.1
export hrrr_ver=v4.1
export pcpanl_ver=v4.1
export blend_ver=v4.2
export estofs_ver=v2.1
export stofs_ver=v2.1
#export psurge_ver=v2.10
export psurge_ver=v3.0

#----------------------------------------------
# mpiexec and cfp commands
# NOTE(review): NPROCS is expanded HERE, at assignment time — it must already
# be exported by the calling job before this file is sourced; verify upstream.
#----------------------------------------------
export MPICOMMAND="mpiexec -n ${NPROCS} --cpu-bind core "
export MPICOMMAND2="mpiexec -n ${NPROCS} --depth 2 --cpu-bind depth "
export MPICOMMAND3="mpiexec -n 1 --cpu-bind core "
export CFPCOMMAND="mpiexec -np ${NPROCS} --cpu-bind core cfp"

#----------------------------------------------
# Set input forcing types (NETCDF or GRIB2)
#----------------------------------------------
export InputDataFormat=GRIB2
#export InputDataFormat=NETCDF
#----------------------------------------------
# Use NCO upstream canned data
# (uncomment this section for canned-data testing)
#----------------------------------------------
#export DCOMROOT=/lfs/h1/ops/canned/dcom
#export comDir=/lfs/h1/ops/canned/com
#export COMPATH=$comDir/nam:$comDir/hiresw:$comDir/gfs:$comDir/cfs:$comDir/rap:$comDir/hrrr:$comDir/pcpanl
#export SENDDBN=NO
#export SENDDBN_NTC=NO
#export PRECONVERSION_MAX_DAYS=200

#----------------------------------------------
# Testing to address long run of medium_range
# To use a better memory management library (Intel compiler shortcoming) set these
#----------------------------------------------
#export MALLOC_MMAP_MAX_=0
#export MALLOC_TRIM_THRESHOLD_=134217728
#export FOR_DISABLE_KMP_MALLOC=TRUE

#----------------------------------------------
# To speed up many-to-one communication pattern using MPI_ISEND -> MPI_RECV calls
#----------------------------------------------
#export FI_OFI_RXM_BUFFER_SIZE=128000
#export FI_OFI_RXM_RX_SIZE=64000

#----------------------------------------------
# To speed up MPI_Bcast messages add these env variables to run script
#----------------------------------------------
#export MPICH_OFI_STARTUP_CONNECT=1
#export MPICH_COLL_OPT_OFF=MPI_Bcast
#export MPICH_COLL_SYNC=MPI_Bcast

# Production data locations (normally supplied by the environment):
#export DCOMROOT=/lfs/h1/ops/prod/dcom
#export COMPATH=/lfs/h1/ops/prod/com/gfs:/lfs/h1/ops/prod/com/rap:/lfs/h1/ops/prod/com/hrrr:/lfs/h1/ops/prod/com/hiresw:/lfs/h1/ops/prod/com/nam:/lfs/h1/ops/prod/com/stofs:/lfs/h1/ops/prod/com/psurge:/lfs/h1/ops/prod/com/blend:/lfs/h1/ops/prod/com/cfs:/lfs/h1/ops/prod/com/pcpanl

#----------------------------------------------
# Notification mailing lists (comma-separated recipient sets)
#----------------------------------------------
export maillist1="nco.spa@noaa.gov,sdm@noaa.gov,ncep.sos@noaa.gov,idp-support@noaa.gov,Cham.Pham@noaa.gov,Brian.Cosgrove@noaa.gov,Donald.W.Johnson@noaa.gov"
export maillist2="nco.spa@noaa.gov,Cham.Pham@noaa.gov,Brian.Cosgrove@noaa.gov,Donald.W.Johnson@noaa.gov"
export maillist3="nco.spa@noaa.gov,Cham.Pham@noaa.gov,Brian.Cosgrove@noaa.gov"