## This is a UNIX conf file that contains all information relating to
# the HWRF configuration.  UNIX conf is used because of how easy it is
# to parse (even GrADS can do it).  The syntax:
#
#      [section]
#      var = value
#
# For generation of namelists for WRF, WPS and other Fortran programs,
# we use this syntax:
#
#     [section]
#     namelist.nlvar = value
#
# to set the value of namelist &namelist's nlvar variable.  Also, the
# special variable "namelist" lists additional conf sections to
# recurse into to get more namelist variables after the current conf
# section is parsed.  Any variable will only be set once: the first
# time it is seen.

## Sets basic configuration options used by all components.
#
# This section sets basic configuration options used by all components.  
# Several special variables in this section are set by the HWRFConfig 
# object itself, which will overwrite them if they're set in this
# file:
# * YMDHM = analysis time (201304261830 = April 26, 2013, 18:30 UTC)
# * YMDH = analysis time excluding minute (2013042618)
# * YMD = analysis time, excluding hour and minute
# * year, YYYY = analysis time's year (i.e., 2013)
# * YY = last two digits of year
# * century, CC = first two digits of year
# * month, MM = analysis time's month (i.e., 04)
# * day, DD = analysis time's day (i.e., 26)
# * hour, cyc, HH = analysis time's hour (i.e., 18)
# * minute, min = analysis time's minute (i.e., 30)
#
# There may be additional variables depending on what subclass (if
# any) of the HWRFConfig is used.  You must specify the mandatory EXPT
# value, which is the name of the experiment to run.
[config]
EXPT=HWRF ;; Experiment name, used for finding installation locations

## Section that specifies the configuration for the forecast job.
forecast_section=runwrf

# SUBEXPT={EXPT}
# storm={ENV[STORM]}
# stnum={ENV[stormenv]}
# basin1=l
# stormlat=31.5
# stormlon=-73.7
# domlat=25.0
# domlon=-75.3

## The storm label: storm1, storm2, etc.
#
# This string is always "storm" followed by a single-digit number.  It
# is used by NCO to decide where certain files and directories are
# located.  It also gives automation systems a way of determining
# storm filenames without having to know the storm name or ID.
stormlabel=storm{storm_num}

# Similar to stormlabel, except it currently is not used by NCO.
# Useful when running multiple storms in a workflow.
global_stormlabel=storm{global_storm_num}

## Where to put the HWRF database file
#
# Location for the HWRF database file.  Note that there must be only
# one of these per workflow (storm, cycle).  The HWRFConfig class will
# also set the dsetfile variable if it is not already set:
datastore={WORKhwrf}/hwrf_state.sqlite3

## The main configuration file.
CONFhwrf={com}/{stormlabel}.conf

ENS=99   ;; The ensemble number; do not change.  The launcher sets it.
ensize=0 ;; Ensemble size; do not change.  The parm/hwrf_ensemble_2014.conf overrides it.

GFSVER=PROD2019 ;; GFS version: PROD2012 or PROD2014

## Configure the relocation jobs
[relocate]
#scrub=no   ; disable scrubbing of relocation directories
initopt=0 ;; 0: full vortex initialization, 1: relocation only
tdrconditionalvinit=no ;; if yes, relocation only when TDR data available

## Configure the merge job.
[merge]
# Nothing needed here yet
# scrub=no   ; disable scrubbing of merge directory
blend_wmax=32.0 ;; the maximum wind threshold used to turn on blending 

[wvstatus]
## The name of the wave status file in the com directory:
wvstatus={com}/{stormlabel}.wave_status

## Second wave file name that mimics the old operational ocean status file.
#
# This second wave file uses a filename that cannot be predicted in
# advance.  That breaks ecFlow and Rocoto.  The file is still
# created for backward compatibility.
wvstatus2={com}/wave_status.{vit[stormname]}{vit[stnum]:02d}{vit[basin1lc]}.{cycle}

## Ocean status file specification.
[ocstatus]
## The name of the ocean status file in the com directory:
ocstatus={com}/{stormlabel}.ocean_status

## Operational name of the ocean status file
#
# Alternative name of the ocean status file, used by the downstream
# GFDL hurricane model.  That name is not presently used by the HWRF
# Rocoto workflow because it cannot be predicted in advance.
ocstatus2={com}/ocean_status.{vit[stormname]}{vit[stnum]:02d}{vit[basin1lc]}.{cycle}

## Configure file and directory paths
[dir]
utilexec={HOMEhwrf}/nwport/util/exec ;; utilities ported from WCOSS
statusfile={WORKhwrf}/{stormlabel}.{YMDH} ;; cycle status file
intercom={WORKhwrf}/intercom  ;; dir for communicating data files between jobs
lockdir={WORKhwrf}/lock       ;; lock files for post-processing
INPUThwrf=./ ; {WORKhwrf}/input    ;; parent model, obs, etc.
SPINUP_DATA={WORKhwrf}/OCEAN/ ;; Ocean spin-up data directory

geog_data={FIXhwrf}/hwrf_wps_geo/ ;; Geographic input data
PARMhycom={PARMhwrf}/hycom/  ;; hycom parameter files
PARMww3={PARMhwrf}/ww3/  ;; wavewatch3 parameter files
FIXgsi={PARMhwrf}/hwrf-gsi/   ;; GSI input data for everything except CRTM
FIXcrtm={FIXhwrf}/hwrf-crtm-2.2.6/ ;; GSI CRTM input data
FIXww3={FIXhwrf}/hwrf-ww3/ ;; wavewatch3 fix files

## Domain center location file in COM.
#
# This is the full path to the domain center location file, which MUST
# be in com.  It is used to determine whether a cycle has a com
# directory:
domlocfile={com}/{vit[stnum]:02d}{vit[basin1lc]}.{vit[YMDH]}.domain.center

## The name of the gsi status file in the com directory
gsistatus={stormlabel}.gsi_status

## Operational name of the gsi status file
gsistatus2=gsi_status.{vit[stormname]}{vit[stnum]:02d}{vit[basin1lc]}.{cycle}

## File to check in a prior cycle's com, to see if the cycle exists.
#
# File to use to check if a prior cycle exists for a given storm.
# This only applies to the single storm HWRF.  It should not use the
# vit[] variable; instead, use oldvit[].
HISTCHECK={oldcom}/{oldvit[stnum]:02d}{oldvit[basin1lc]}.{oldvit[YMDH]}.domain.center

## Executable program locations
[exe]
satgrib2={EXEChwrf}/hwrf_satgrib2 ;; Converts hwrfsat files to grib2
hwrf_regrid_merge={EXEChwrf}/hwrf_regrid_merge ;; Replacement for copygb which performs multiple operations at once

hwrf_pom_archv2data3z = {EXEChwrf}/hwrf_pom_archv2data3z
hwrf_pom_hycom2raw = {EXEChwrf}/hwrf_pom_hycom2raw

wgrib2={EXEChwrf}/hwrf_wgrib2  ;; wgrib2 GRIB2 indexing and manipulation program
nco_wgrib2={utilexec}/wgrib2  ;;  wgrib2 from NCO utilexec
cnvgrib={EXEChwrf}/hwrf_cnvgrib  ;; cnvgrib GRIB1/2 conversion program

wgrib={utilexec}/wgrib  ;; wgrib GRIB1 indexing and manipulation program
grbindex={utilexec}/grbindex ;; GRIB1 binary index generation program
grb2index={utilexec}/grb2index ;; GRIB2 binary index generation program
mpiserial={EXEChwrf}/mpiserial ;; Executes serial programs via MPI

# tar/htar/hsi: These three are not used in EMC-maintained production
# jobs since NCO maintains ksh-based archiving jobs.  When EMC runs,
# we get these from the $PATH:
tar=tar    ;; GNU Tar
htar=htar  ;; HTAR tape archiving program
hsi=hsi    ;; hsi tape manipulation program

# The rest of these are compiled by the HWRF sorc/ build system:

# HYCOM executables:
hwrf_get_rtofs={EXEChwrf}/hwrf_get_rtofs
hwrf_rtofs_subregion={EXEChwrf}/hwrf_rtofs_subregion
hwrf_isubregion2avg={EXEChwrf}/hwrf_isubregion2avg
hwrf_rtofs_hat10_forecast={EXEChwrf}/hwrf_rtofs_hat10_forecast
hwrf_rtofs_hep20_forecast={EXEChwrf}/hwrf_rtofs_hep20_forecast
hwrf_rtofs_hin40_forecast={EXEChwrf}/hwrf_rtofs_hin40_forecast
hwrf_rtofs_hsn50_forecast={EXEChwrf}/hwrf_rtofs_hsn50_forecast
hwrf_rtofs_hsp60_forecast={EXEChwrf}/hwrf_rtofs_hsp60_forecast
hwrf_rtofs_hwp30_forecast={EXEChwrf}/hwrf_rtofs_hwp30_forecast
hwrf_rtofs_hcp70_forecast={EXEChwrf}/hwrf_rtofs_hcp70_forecast
ofs_getkpds={EXEChwrf}/ofs_getkpds
hwrf_gfs2ofs2={EXEChwrf}/hwrf_gfs2ofs2
hwrf_ofs_timeinterp_forcing={EXEChwrf}/hwrf_ofs_timeinterp_forcing
ofs_correct_forcing={EXEChwrf}/ofs_correct_forcing
hwrf_archv2restart={EXEChwrf}/hwrf_archv2restart
hwrf_rtofs_restart2restart={EXEChwrf}/hwrf_rtofs_restart2restart
hwrf_ofs_fwind={EXEChwrf}/hwrf_ofs_fwind
hwrf_ofs_wind2hycom={EXEChwrf}/hwrf_ofs_wind2hycom
hwrf_ofs_correct_wind={EXEChwrf}/hwrf_ofs_correct_wind
ofs_latlon={EXEChwrf}/ofs_latlon
hwrf_ofs_archv2data2d={EXEChwrf}/hwrf_ofs_archv2data2d
hwrf_ofs_archv2data3z={EXEChwrf}/hwrf_ofs_archv2data3z
# HYCOM executables needed by POM RTOFS initialization
hwrf_hycom2raw={EXEChwrf}/hwrf_hycom2raw

# Wavewatch3 executables:

ww3_grid = {EXEChwrf}/hwrf_ww3_grid
ww3_strt = {EXEChwrf}/hwrf_ww3_strt
ww3_prep = {EXEChwrf}/hwrf_ww3_prep
ww3_outf = {EXEChwrf}/hwrf_ww3_outf
ww3_outp = {EXEChwrf}/hwrf_ww3_outp
ww3_trck = {EXEChwrf}/hwrf_ww3_trck
ww3_grib = {EXEChwrf}/hwrf_ww3_grib
ww3_gspl = {EXEChwrf}/hwrf_ww3_gspl 
ww3_gint = {EXEChwrf}/hwrf_ww3_gint
gx_outf = {EXEChwrf}/hwrf_gx_outf
gx_outp = {EXEChwrf}/hwrf_gx_outp
ww3_systrk = {EXEChwrf}/hwrf_ww3_systrk
ww3_bound = {EXEChwrf}/hwrf_ww3_bound
ww3_shel = {EXEChwrf}/hwrf_ww3_shel
ww3_multi = {EXEChwrf}/hwrf_ww3_multi
ww3_sbs1 = {EXEChwrf}/hwrf_ww3_sbs1
ww3_prnc = {EXEChwrf}/hwrf_ww3_prnc
ww3_ounf = {EXEChwrf}/hwrf_ww3_ounf
ww3_ounp = {EXEChwrf}/hwrf_ww3_ounp
ww3_bounc = {EXEChwrf}/hwrf_ww3_bounc

# POM executables:

hwrf_ocean_fcst={EXEChwrf}/hwrf_ocean_fcst ;; POM 3D forecast program
hwrf_ocean_init={EXEChwrf}/hwrf_ocean_init ;; POM 3D init program
hwrf_ocean_pomprep_fb={EXEChwrf}/hwrf_ocean_pomprep_fb ;; POM FB prep
hwrf_ocean_pomprep_g3={EXEChwrf}/hwrf_ocean_pomprep_g3 ;; POM G3 prep
hwrf_ocean_pomprep_id={EXEChwrf}/hwrf_ocean_pomprep_id ;; POM ID prep
hwrf_ocean_pomprep_na={EXEChwrf}/hwrf_ocean_pomprep_na ;; POM NA prep
hwrf_ocean_pomprep_rt={EXEChwrf}/hwrf_ocean_pomprep_rt ;; POM RTOFS initialization prep program
hwrf_ocean_transatl06prep={EXEChwrf}/hwrf_ocean_transatl06prep ;; pom trans-atlantic prep
hwrf_getsst={EXEChwrf}/hwrf_getsst ;; Obtains GFS SST for POM
hwrf_sharp_mcs_rf_l2m_rmy5={EXEChwrf}/hwrf_sharp_mcs_rf_l2m_rmy5 ;; POM loop current feature initialization


## Atmosphere and utilities:

gsi={EXEChwrf}/hwrf_gsi  ;; GSI data assimilation 
enkf={EXEChwrf}/hwrf_enkf ;; EnKF data assimilation
post={EXEChwrf}/hwrf_post ;; Unified Post Processor
copygb={EXEChwrf}/hwrf_egrid2latlon ;; Copygb with bug fixes for E grid
tave={EXEChwrf}/hwrf_tave ;; Tracker TAVE pre-processor
vint={EXEChwrf}/hwrf_vint ;; Tracker VINT pre-processor
gettrk={EXEChwrf}/hwrf_unified_tracker ;; GFDL Vortex Tracker
hwrf_nhc_products={EXEChwrf}/hwrf_nhc_products ;; hwrf_nhc_products special product generator
hwrf_prep={EXEChwrf}/hwrf_prep ;; HWRF spectral to grid transformation program
real_nmm={EXEChwrf}/hwrf_real_nmm ;; WRF-NMM real case preparation program
swcorner_dynamic={EXEChwrf}/hwrf_swcorner_dynamic ;; sets i and j parent start in WRF namelists
wrf={EXEChwrf}/hwrf_wrf ;; WRF-NMM with HWRF mode
wrfout_newtime={EXEChwrf}/hwrf_wrfout_newtime ;; Modifies WRF NetCDF file times

hwrf_metgrid_levels={EXEChwrf}/hwrf_metgrid_levels ;; Get the metgrid level info from a NetCDF metgrid file

hwrf_wm3c={EXEChwrf}/hwrf_wm3c ;; NCEP Coupler

hwrf_geogrid={EXEChwrf}/hwrf_geogrid ;; WPS Geogrid program
hwrf_ungrib={EXEChwrf}/hwrf_ungrib ;; WPS Ungrib program
hwrf_metgrid={EXEChwrf}/hwrf_metgrid ;; WPS metgrid program
hwrf_3dvar={EXEChwrf}/hwrf_diffwrf_3dvar ;; Vortex get/paste program for NetCDF files
hwrf_final_merge={EXEChwrf}/hwrf_final_merge ;; Final Merge program for hwrf multistorm
hwrf_bin_io={EXEChwrf}/hwrf_bin_io ;; Vortex get/paste program for WRF binary files
hwrf_merge_nest={EXEChwrf}/hwrf_merge_nest_4x_step12_3n ;; Vortex relocation merge_nest program
hwrf_merge_enkf={EXEChwrf}/hwrf_merge_nest_4x_step12_enkf ;; Ensemble domain merge_enkf program
hwrf_trk_guess={EXEChwrf}/hwrf_create_trak_guess ;; Vortex relocation trak_guess program
hwrf_wrf_split={EXEChwrf}/hwrf_split1 ;; Vortex relocation hwrf_split program
hwrf_pert_ct={EXEChwrf}/hwrf_pert_ct1 ;; Vortex relocation pert_ct program
hwrf_create_nest={EXEChwrf}/hwrf_create_nest_1x_10m ;; Vortex relocation create_nest_1x_10m program
hwrf_create_trak_fnl={EXEChwrf}/hwrf_create_trak_fnl ;; Vortex relocation create_trak_fnl program
hwrf_merge_nest={EXEChwrf}/hwrf_merge_nest_4x_step12_3n ;; Vortex relocation hwrf_merge_nest program
hwrf_anl_4x={EXEChwrf}/hwrf_anl_4x_step2 ;; Vortex relocation anl_4x program
hwrf_anl_cs={EXEChwrf}/hwrf_anl_cs_10m ;; Vortex relocation anl_cs program
hwrf_anl_bogus={EXEChwrf}/hwrf_anl_bogus_10m ;; Vortex relocation anl_bogus program
hwrf_inter_2to1={EXEChwrf}/hwrf_inter_2to1 ;; Vortex relocation 2to1 interpolator
hwrf_inter_2to6={EXEChwrf}/hwrf_inter_2to6 ;; Vortex relocation 2to6 interpolator
hwrf_inter_2to2={EXEChwrf}/hwrf_inter_2to2 ;; Vortex relocation 2to2 interpolator
hwrf_inter_4to2={EXEChwrf}/hwrf_inter_4to2 ;; Vortex relocation 4to2 interpolator
hwrf_inter_4to6={EXEChwrf}/hwrf_inter_4to6 ;; Vortex relocation 4to6 interpolator

hwrf_blend_gsi={EXEChwrf}/hwrf_blend_gsi ;; Vortex merge hwrf_blend_gsi program
hwrf_readtdrstmid={EXEChwrf}/hwrf_readtdrstmid ;; TDR storm ID reader
hwrf_readtdrtime={EXEChwrf}/hwrf_readtdrtime ;; TDR time reader
hwrf_readtdrtrigger={EXEChwrf}/hwrf_readtdrtrigger ;; TDR trigger processor
hwrf_rem_prepbufr_typ_in_circle={EXEChwrf}/hwrf_rem_prepbufr_typ_in_circle ;; hwrf.bufrprep rem_prepbufr_typ_in_circle program
hwrf_change_prepbufr_qm_in_circle={EXEChwrf}/hwrf_change_prepbufr_qm_in_circle ;; hwrf.bufrprep change_prepbufr_qm_in_circle program
hwrf_change_prepbufr_qm_typ={EXEChwrf}/hwrf_change_prepbufr_qm_typ ;; hwrf.bufrprep change_prepbufr_qm_typ program
hwrf_ensemble={EXEChwrf}/hwrf_ensemble
hwrf_enkf={EXEChwrf}/hwrf_enkf
hwrf_interpolate={EXEChwrf}/hwrf_interpolate

# Executable list if you do not run make install
#gsi={HOMEhwrf}/sorc/GSI/run/gsi.exe
#post={HOMEhwrf}/sorc/UPP/bin/unipost.exe
#wgrib={HOMEhwrf}/sorc/hwrf-utilities/exec/wgrib.exe
#copygb={HOMEhwrf}/sorc/UPP/bin/copygb.exe
#cnvgrib={HOMEhwrf}/sorc/UPP/bin/cnvgrib.exe
#tave={HOMEhwrf}/sorc/gfdl-vortextracker/trk_exec/hwrf_tave.exe
#vint={HOMEhwrf}/sorc/gfdl-vortextracker/trk_exec/hwrf_vint.exe
#grbindex={HOMEhwrf}/sorc/hwrf-utilities/exec/grbindex.exe
#gettrk={HOMEhwrf}/sorc/gfdl-vortextracker/trk_exec/hwrf_gettrk.exe
#hwrf_nhc_products={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_nhc_products.exe
#hwrf_prep={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_prep.exe
#mpiserial={HOMEhwrf}/sorc/hwrf-utilities/exec/mpiserial.exe
#real_nmm={HOMEhwrf}/sorc/WRFV3/main/real_nmm.exe
#swcorner_dynamic={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_swcorner_dynamic.exe
#wrf={HOMEhwrf}/sorc/WRFV3/main/wrf.exe
#wrfout_newtime={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_wrfout_newtime.exe
#
#hwrf_ocean_fcst={HOMEhwrf}/sorc/pomtc/ocean_exec/hwrf_ocean_fcst.exe
#hwrf_ocean_init={HOMEhwrf}/sorc/pomtc/ocean_exec/hwrf_ocean_init.exe
#hwrf_ocean_pomprep_fb={HOMEhwrf}/sorc/pomtc/ocean_exec/pomprep_fbtr.xc
#hwrf_ocean_pomprep_g3={HOMEhwrf}/sorc/pomtc/ocean_exec/pomprep_gdm3.xc
#hwrf_ocean_pomprep_id={HOMEhwrf}/sorc/pomtc/ocean_exec/pomprep_idel.xc
#hwrf_ocean_pomprep_na={HOMEhwrf}/sorc/pomtc/ocean_exec/pomprep_ncda.xc
#hwrf_ocean_transatl06prep={HOMEhwrf}/sorc/pomtc/ocean_exec/transatl06prep.xc
#hwrf_getsst={HOMEhwrf}/sorc/pomtc/ocean_exec/gfdl_getsst.exe
#hwrf_sharp_mcs_rf_l2m_rmy5={HOMEhwrf}/sorc/pomtc/ocean_exec/gfdl_sharp_mcs_rf_l2m_rmy5.exe
#
#hwrf_wm3c={HOMEhwrf}/sorc/ncep-coupler/cpl_exec/hwrf_wm3c.exe
#
#hwrf_geogrid={HOMEhwrf}/sorc/WPSV3/geogrid.exe
#hwrf_ungrib={HOMEhwrf}/sorc/WPSV3/ungrib.exe
#hwrf_metgrid={HOMEhwrf}/sorc/WPSV3/metgrid.exe
#hwrf_3dvar={HOMEhwrf}/sorc/hwrf-utilities/exec/diffwrf_3dvar.exe
#hwrf_bin_io={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_bin_io.exe
#hwrf_merge_nest={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_merge_nest_4x_step12_3n.exe
#hwrf_trk_guess={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_create_trak_guess.exe
#hwrf_wrf_split={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_split1.exe
#hwrf_pert_ct={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_pert_ct1.exe
#hwrf_create_nest={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_create_nest_1x_10m.exe
#hwrf_create_trak_fnl={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_create_trak_fnl.exe
#hwrf_anl_4x={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_anl_4x_step2.exe
#hwrf_anl_cs={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_anl_cs_10m.exe
#hwrf_anl_bogus={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_anl_bogus_10m.exe
#hwrf_inter_2to1={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_inter_2to1.exe
#hwrf_inter_2to6={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_inter_2to6.exe
#hwrf_inter_2to2={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_inter_2to2.exe
#hwrf_inter_4to2={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_inter_4to2.exe
#hwrf_inter_4to6={HOMEhwrf}/sorc/hwrf-utilities/exec/hwrf_inter_4to6.exe

# -----------------------------------------------------------------------
# Preprocessing configuration.
# -----------------------------------------------------------------------

## Configures the wavewatch3 initialization
[ww3init]
scrub = no                           ;; scrub temporary files?
catalog = {input_catalog}            ;; where to get input data
# Subsection for ww3 pre task
usegfswind = yes                     ;; Time-updated GFS forcing outside HWRF domain?
input_step = 21600                   ;; Timestep between forcing updates
gfs_dataset = gfs                    ;; Dataset for GFS forcing
gfs_item = gfs_gribA                 ;; Data item for GFS forcing
ww3_dataset = ww3                    ;; Dataset for wave boundary condition from global wave multi_1
ww3bdy_item = ww3bdy_spec            ;; Data item for wave boundary condition from global wave multi_1
ww3rst_item = ww3rst_glo_30m         ;; Data item for wave initial condition from global wave multi_1

ww3_bdy = yes                        ;; Use wave boundary condition from NCEP global wave multi_1
ww3_rst = yes                        ;; Option controlling how to use initial wave condition from NCEP global wave multi_1
#                                       yes: use initial wave condition from NCEP global wave multi_1 for cold start forecast cycles
#                                       always: use initial wave condition from NCEP global wave multi_1 for all forecast cycles

grid_glo_30m_inp = {PARMww3}/ww3_grid_glo_30m.inp
grid_inp = {PARMww3}/ww3_grid_{vit[basin1lc]}.inp
grid_bot = {FIXww3}/ww3_grid_{vit[basin1lc]}.bot
grid_msk = {FIXww3}/ww3_grid_{vit[basin1lc]}.msk
grid_msk2 = {FIXww3}/ww3_grid_{vit[basin1lc]}.msk2
grid_obr = {FIXww3}/ww3_grid_{vit[basin1lc]}.obr
gint_inp = {PARMww3}/ww3_gint.inp_tmpl
wind_inp = {PARMww3}/ww3_prep_WNDDummy.inp
prnc_inp_gfswind = {PARMww3}/ww3_prnc_gfswind.inp
curr_inp = {PARMww3}/ww3_prep_CURDummy.inp
strt_inp = {PARMww3}/ww3_strt.inp
bound_inp = {PARMww3}/ww3_bound_{vit[basin1lc]}.inp
shel_inp = {PARMww3}/ww3_shel.inp_tmpl
ounf_inp = {PARMww3}/ww3_ounf.inp_tmpl
ounp_spec_inp = {PARMww3}/ww3_ounp_spec.inp_tmpl
outp_info_inp = {PARMww3}/ww3_outp_info.inp_tmpl
outp_bull_inp = {PARMww3}/ww3_outp_bull.inp_tmpl
outp_spec_inp = {PARMww3}/ww3_outp_spec.inp_tmpl
grib_inp = {PARMww3}/ww3_grib.inp_tmpl
buoy_inp = {PARMww3}/ww3_buoy.inp

## Configures the hwrf_expt.gfs_init
[gfsinit]
# Subsections to configure each subtask:
geogrid=geogrid ;; section to configure hwrf.wps.Geogrid
metgrid=metgrid ;; section to configure hwrf.wps.Metgrid
realinit=wrfexe ;; section to configure hwrf.fcsttask.RealNMM for init-length runs
realfcst=wrfexe ;; section to configure hwrf.fcsttask.RealNMM for forecast-length runs
wrfanl=wrfexe ;; section to configure hwrf.fcsttask.WRFAnl or hwrf.fcsttask.WRFAnl4Trak
wrfghost=wrfexe ;; section to configure hwrf.fcsttask.WRFGhost or hwrf.fcsttask.WRFGhostForPost
post=nonsatpost ;; section to configure the post for finding the parent model vortex
regribber=regribber ;; section to configure the regribber for finding the parent model vortex
tracker=tracker ;; section to configure the tracker for finding the parent model vortex

## Configures the hwrf_expt.fgat_init.  
[fgat]
## Boundary condition step:
ibdystep=10800
# FGAT hours:
FGATSTR=-3 ;; FGAT starting hour
FGATINV=3 ;; Step in hours between the FGAT hours
FGATEND=3 ;; FGAT end hour
# Subsections to configure each subtask:
geogrid=geogrid ;; section to configure hwrf.wps.Geogrid
metgrid=metgrid ;; section to configure hwrf.wps.Metgrid
realinit=wrfexe ;; section to configure hwrf.fcsttask.RealNMM for init-length runs
realfcst=wrfexe ;; section to configure hwrf.fcsttask.RealNMM for forecast-length runs
wrfanl=wrfexe ;; section to configure hwrf.fcsttask.WRFAnl or hwrf.fcsttask.WRFAnl4Trak
wrfghost=wrfexe ;; section to configure hwrf.fcsttask.WRFGhost or hwrf.fcsttask.WRFGhostForPost
post=nonsatpost ;; section to configure the post for finding the parent model vortex
regribber=regribber ;; section to configure the regribber for finding the parent model vortex
tracker=tracker ;; section to configure the tracker for finding the parent model vortex
ungrib=fgat_ungrib ;; section to configure hwrf.wps.Ungrib
prep_hybrid=fgat_prep_hybrid ;; section to configure hwrf.prep.PrepHybrid

## Configures hwrf.wps.Geogrid geographical data processor.
[geogrid]
redirect = yes ;; Redirect output to per-program log files?
tbl = {PARMhwrf}/hwrf_GEOGRID.TBL ;; Geogrid table file (GEOGRID.TBL)
namelist = wps_namelist ;; Section with the WPS namelist

## Configures hwrf.wps.Ungrib for the hwrf_expt.gfs_init (not FGAT)
[ungrib]
redirect = yes ;; Redirect output to per-program log files?
catalog = {input_catalog} ;; Data catalog for hwrf.input to find files
dataset = gfs ;; Dataset for hwrf.input to find files
tbl = {PARMhwrf}/hwrf_Vtable_gfs2017 ;; The Vtable file for Geogrid
item2_optional=yes ;; Is the second GRIB file type optional?
item = gfs_gribA ;; item (grib file type) for the hwrf.input
item2 = gfs_gribB ;; second GRIB file item for hwrf.input
namelist = wps_namelist ;; Section that defines the WPS namelist

## Subset file for subsetting the GRIB1 data (not used for GRIB2)
subset_grib1 = {PARMhwrf}/hwrf_global_1x1_paramlist.f00

## Ungrib Vtable for 2011 data
# 
# The hwrf.prelaunch will replace the tbl value automatically in the
# launcher job with the tbl2011 field, if the cycle is in 2011.
tbl2011 = {PARMhwrf}/hwrf_Vtable_gfs2012

## Configures hwrf.wps.Ungrib for the FGAT case.
[fgat_ungrib]
item2_optional=yes ;; Is the second GRIB file type optional?
item = gdas1_gribA ;; item (grib file type) for the hwrf.input 
## There is no "B" file for GDAS:
item2 =
namelist = wps_namelist  ;; Section that defines the WPS namelist
@inc = ungrib ;; Include more configuration options from the ungrib section
dataset = gdas1 ;; Dataset for hwrf.input to find files 

## Configures the hwrf.wps.Metgrid
[metgrid]
redirect = yes ;; Use per-program log files
tbl = {PARMhwrf}/hwrf_METGRID.TBL ;; The METGRID.TBL file
namelist = wps_namelist ;; The section that defines the WPS namelist
scrub = no ;; Scrub temporary files?

## Defines the WPS namelist
#
# This section is sent through hwrf.namelist.Conf2Namelist to generate
# the WPS namelist for all WPS components.  See the WPS documentation
# for details on the option meanings.
[wps_namelist]
share.wrf_core = 'NMM',
geogrid.map_proj =  'rotated_ll',
geogrid.geog_data_path = "{FIXhwrf}/hwrf_wps_geo/"
geogrid.opt_geogrid_tbl_path = './'
geogrid.ref_x = 105.0,
geogrid.ref_y = 159.0,
ungrib.out_format = 'WPS',
ungrib.prefix = 'FILE',
metgrid.fg_name = 'FILE',
metgrid.opt_metgrid_tbl_path = './'
mod_levs.press_pa = 201300, 200100, 100000, 95000, 90000, 85000, 80000,
                     75000,  70000,  65000, 60000, 55000, 50000, 45000,
                     40000,  35000,  30000, 25000, 20000, 15000, 10000,
                      5000,   1000,    500,   200

## Configures the prep_hybrid program for the non-FGAT case.
[prep_hybrid]
dataset = gfs ;; The dataset name for hwrf.input
item = gfs_sf ;; The item name for hwrf.input
catalog = {input_catalog} ;; The catalog name for hwrf.input.DataCatalog
namelist = prep_hybrid_namelist ;; The section to use to generate the prep_hybrid namelist
threads=24 ;; Number of threads for running prep_hybrid
imax=1440 ;; The intermediate grid size in the longitude direction.
jmax=721 ;; The intermediate grid size in the latitude direction.

## Configures the prep_hybrid program for the FGAT case.
[fgat_prep_hybrid]
dataset = gdas ;; The dataset name for hwrf.input
item = gdas1_sf ;; The item name for hwrf.input
@inc=prep_hybrid ;; Include the prep_hybrid section for more options.
namelist = prep_hybrid_namelist ;; Use this section for generating the namelist

## Sent through hwrf.namelist.Conf2Namelist for generating the
## prep_hybrid namelist.  See the prep_hybrid documentation for
## details.
[prep_hybrid_namelist]
rgrid.pola = F
rgrid.alonvt = -90.0
rgrid.polei = 0.25
rgrid.polej = 360.
rgrid.xmeshl = 0.25
rgrid.north = F
# prmfld.ntimes = 1 ; set to 1 if it is unset here
# domain.p_top_requested = 50 ; automatically set from WRF namelist
# domain.ptsgm=20000 ; automatically set from WRF namelist
# domain.levels=0.995,... ; automatically set from WRF namelist

## Configure the relocation.
[relocation]
tbl = {FIXhwrf}/hwrf_eta_micro_lookup.dat ;; eta_micro_lookup data file
redirect = true ;; Redirect each program to its own log file?
#scrub=no

## Configure the hwrf.bufrprep
[bufrprep]
# scrub=no   ; disable scrubbing of bufrprep directory
catalog = {input_catalog} ;; The catalog for hwrf.input.DataCatalog
obstypes = hdob_obstype, tdr_new_obstype ;; Section for defining the observation types
bufr_item=gfs_bufr ;; item for bufr files, for the hwrf.input to find them
bufr_dataset=gfs ;; dataset for bufr files, for the hwrf.input to find them
prepbufr_item=gfs_prepbufr_rst ;; item for prepbufr files, for the hwrf.input to find them
## options to preprocess prepbufr file
#
# 0: make no change
# 1: remove some inner-core data
# 2: flag/unflag mass and dropsonde u, v data
# 3: unflag HS3 dropsonde data, then reflag near center to be consistent with other drops
# 4: Option 3 + unflag u/v dropsonde data near center below a certain wind threshold
prepbufrprep=3 ;; parameter used to preprocess prepbufr file
## parameter used to define an area where inner-core data are removed/(un)flagged
#
# radius of a circle centered at TC center
# > 0. remove conventional data, when prepbufrprep=1
#      flag dropsonde wind data, when prepbufrprep=2
# < 0. unflag dropsonde wind data, when prepbufrprep=2
# = 0. no change for dropsonde wind data, when prepbufrprep=2
RRADC=50.
## parameter used to define a square area to flag pressure data
#
# half side length of a square centered at TC center
# > 0. flag pressure data, when prepbufrprep=2
# <= 0. no change, when prepbufrprep=2
RBLDC=-200. 

#Wind speed threshold for unflagging u/v dropsonde data when prepbufrprep=4
uvunflag_vmax=32.

# -----------------------------------------------------------------------
# GSI CONFIGURATION
# -----------------------------------------------------------------------

## Configures the GSI for the intermediate resolution domains
[gsi_d02]
# scrub=no   ; disable scrubbing of gsi_d02 directory
redirect=yes ;; Redirect output to per-program log files?
catalog = {input_catalog} ;; section for the hwrf.input.DataCatalog
use_newradbc=yes ;; Use new bias correction data for 2015 GFS and later?
obstypes = hdob_obstype,sat_radiance_obstypes,sat_wnd_obstype,tdr_new_obstype ;; List of obstype sections
bufr_item=gfs_bufr ;; item for bufr files, for the hwrf.input to find them
bufr_dataset=gfs ;; dataset for bufr files, for the hwrf.input to find them
prepbufr_item=gfs_prepbufr_rst ;; item for prepbufr files, for the hwrf.input to find them
nml_file={PARMhwrf}/hwrf_gsi.nml ;; GSI namelist input file
nml_section=gsi_d02_nml ;; Section used to configure the GSI namelist file
diagpre={com}/{out_prefix}.gsi_d02 ;; Filename prefix for GSI output

## Configures the GSI for the innermost resolution domains
[gsi_d03]
# scrub=no   ; disable scrubbing of gsi_d03 directory
redirect=yes ;; Redirect output to per-program log files?
catalog = {input_catalog} ;; section for the hwrf.input.DataCatalog
use_hwrf_ensemble = yes ;; Is the ENSDA in use?
sat_wnd_da = yes ;; Enable assimilation of satellite wind?
sat_radiance_da = yes ;; Enable satellite radiance data assimilation?
use_gfs_stratosphere = yes ;; Use blended global-regional vertical coordinate for satellite radiance DA
use_newradbc = yes ;; Use new bias correction data for 2015 GFS and later?
obstypes = hdob_obstype,sat_radiance_obstypes,sat_wnd_obstype,tdr_new_obstype ;; List of obstype sections
bufr_item=gfs_bufr ;; item for bufr files, for the hwrf.input to find them
bufr_dataset=gfs ;; dataset for bufr files, for the hwrf.input to find them
prepbufr_item=gfs_prepbufr_rst ;; item for prepbufr files, for the hwrf.input to find them
nml_file={PARMhwrf}/hwrf_gsi.nml ;; GSI namelist input file
nml_section=gsi_d03_nml ;; Section used to configure the GSI namelist file
diagpre={com}/{out_prefix}.gsi_d03 ;; Filename prefix for GSI output
use_hwrf_ensemble_wmax=25.0 ;; Use HWRF ensemble for ensemble covariance, when Vmax is greater than the threshold

## Configures the GSI for the calculation of mean Hx
[gsi_meanhx]
# scrub=no   ; disable scrubbing of meanhx directory
redirect=yes ;; Redirect output to per-program log files?
catalog = {input_catalog} ;; section for the hwrf.input.DataCatalog
sat_wnd_da = yes ;; Enable assimilation of satellite wind?
sat_radiance_da = no ;; Enable satellite radiance data assimilation?
use_gfs_stratosphere = no ;; Use blended global-regional vertical coordinate for satellite radiance DA
use_newradbc = yes ;; Use new bias correction data for 2015 GFS and later?
obstypes = hdob_obstype,sat_radiance_obstypes,sat_wnd_obstype,tdr_new_obstype ;; List of obstype sections
bufr_item=gfs_bufr ;; item for bufr files, for the hwrf.input to find them
bufr_dataset=gfs ;; dataset for bufr files, for the hwrf.input to find them
prepbufr_item=gfs_prepbufr_rst ;; item for prepbufr files, for the hwrf.input to find them
nml_file={PARMhwrf}/hwrf_gsi.nml ;; GSI namelist input file
nml_section=gsi_meanhx_nml ;; Section used to configure the GSI namelist file
ensemble_nml_file={PARMhwrf}/hwrf_ensemble.nml ;; wrf-ensemble namelist input file
ensemble_nml_section=meanhx_ensemble_nml ;; Section used to configure the hwrf_ensemble namelist file
diagpre = {com}/{out_prefix}.gsi_meanhx ;; Filename prefix for GSI output

## Configures the GSI for the calculation of ensemble Hx
[gsi_enshx]
# scrub=no   ; disable scrubbing of enshx directory
redirect=yes ;; Redirect output to per-program log files?
catalog = {input_catalog} ;; section for the hwrf.input.DataCatalog
sat_wnd_da = yes ;; Enable assimilation of satellite wind?
sat_radiance_da = no ;; Enable satellite radiance data assimilation?
use_gfs_stratosphere = no ;; Use blended global-regional vertical coordinate for satellite radiance DA
use_newradbc = yes ;; Use new bias correction data for 2015 GFS and later?
obstypes = hdob_obstype,sat_radiance_obstypes,sat_wnd_obstype,tdr_new_obstype ;; List of obstype sections
bufr_item=gfs_bufr ;; item for bufr files, for the hwrf.input to find them
bufr_dataset=gfs ;; dataset for bufr files, for the hwrf.input to find them
prepbufr_item=gfs_prepbufr_rst ;; item for prepbufr files, for the hwrf.input to find them
nml_file={PARMhwrf}/hwrf_gsi.nml ;; GSI namelist input file
nml_section=gsi_enshx_nml ;; Section used to configure the GSI namelist file
diagpre = {com}/{out_prefix}.gsi_enshx ;; Filename prefix for GSI output

## Used to configure the GSI namelist for d02
[gsi_d02_nml]
# Namelist settings for domain 2 (6km) GSI
HZSCL=0.25,0.5,1.0 ;; background error scale factor for horizontal smoothing
DELTIM=1200 ;; model timestep used for assimilation of precipitation rates
twind=3.0 ;; maximum half time window (hours) for observations
HYBENS_REGIONAL=T ;; logical variable, if .true., then turn on hybrid ensemble option
ENSEMBLE_SIZE_REGIONAL=80 ;; ensemble size
HYBENS_UV_REGIONAL=T ;; if T, then ensemble perturbation wind stored as u,v. if F, streamfunction and velocity potential
BETA_S0=0.2 ;; value between 0 and 1, relative weight given to static background B 
HYBENS_HOR_SCALE_REGIONAL=300 ;; horizontal localization correlation length (km)
HYBENS_VER_SCALE_REGIONAL=-0.5 ;; vertical localization correlation length (>0. grid units, <0. lnp)
READIN_LOCALIZATION=F ;; if T, then read in localization information from external file
GENERATE_ENS_REGIONAL=F ;; if T, generate ensemble perturbations internally as random samples of static B.
## integer, used to select type of ensemble to read in for regional application.
#
# =1: use GEFS internally interpolated to ensemble grid.
# =2: ensembles are WRF NMM format
# =3: ensembles are ARW netcdf format.
# =4: ensembles are NEMS NMMB format.
REGIONAL_ENSEMBLE_OPTION=1
PSEUDO_HYBENS=F ;; if T, use pseudo HWRF (NMM) ensemble
GRID_RATIO_ENS=1 ;; ratio of ensemble grid resolution to analysis grid resolution
MERGE_TWO_GRID_ENSPERTS=F ;; merge ensemble from two nests for HWRF (NMM)
PWGTFLG=F ;; if T, use vertical integration function on ensemble contribution of Psfc
HYBENS_ANISO_REGIONAL=F ;; if T, then use anisotropic recursive filter for localization
WRITE_ENS_SPRD=F ;; if T, write out ensemble spread

## Used to configure the GSI namelist for d03
[gsi_d03_nml]
# Namelist settings for domain 3 (2km) GSI
HZSCL=0.2,0.4,0.8 ;; background error scale factor for horizontal smoothing
DELTIM=1200 ;; model timestep used for assimilation of precipitation rates
twind=3.0 ;; maximum half time window (hours) for observations
HYBENS_REGIONAL=T ;; logical variable, if .true., then turn on hybrid ensemble option
ENSEMBLE_SIZE_REGIONAL=80 ;; ensemble size
HYBENS_UV_REGIONAL=T ;; if T, then ensemble perturbation wind stored as u,v. if F, streamfunction and velocity potential
BETA_S0=0.2 ;; value between 0 and 1, relative weight given to static background B
HYBENS_HOR_SCALE_REGIONAL=150 ;; horizontal localization correlation length (km)
HYBENS_VER_SCALE_REGIONAL=-0.5 ;; vertical localization correlation length (>0. grid units, <0. lnp)
READIN_LOCALIZATION=F ;; if T, then read in localization information from external file
GENERATE_ENS_REGIONAL=F ;; if T, generate ensemble perturbations internally as random samples of static B.
## integer, used to select type of ensemble to read in for regional application.
#
# =1: use GEFS internally interpolated to ensemble grid.
# =2: ensembles are WRF NMM format
# =3: ensembles are ARW netcdf format.
# =4: ensembles are NEMS NMMB format.
REGIONAL_ENSEMBLE_OPTION=1 
PSEUDO_HYBENS=F ;; if T, use pseudo HWRF (NMM) ensemble
GRID_RATIO_ENS=1 ;; ratio of ensemble grid resolution to analysis grid resolution
MERGE_TWO_GRID_ENSPERTS=F ;; merge ensemble from two nests for HWRF (NMM)
PWGTFLG=F ;; if T, use vertical integration function on ensemble contribution of Psfc
HYBENS_ANISO_REGIONAL=F ;; if T, then use anisotropic recursive filter for localization
WRITE_ENS_SPRD=F ;; if T, write out ensemble spread

## Used to configure the GSI namelist for meanhx
[gsi_meanhx_nml]
# Namelist settings for meanhx for enkf domain (6km)
HZSCL=0.25,0.5,1.0 ;; background error scale factor for horizontal smoothing
DELTIM=1200 ;; model timestep used for assimilation of precipitation rates
twind=3.0 ;; maximum half time window (hours) for observations
HYBENS_REGIONAL=F ;; logical variable, if .true., then turn on hybrid ensemble option
ENSEMBLE_SIZE_REGIONAL=80 ;; ensemble size
HYBENS_UV_REGIONAL=T ;; if T, then ensemble perturbation wind stored as u,v. if F, streamfunction and velocity potential
BETA_S0=0.2 ;; value between 0 and 1, relative weight given to static background B
HYBENS_HOR_SCALE_REGIONAL=300 ;; horizontal localization correlation length (km)
HYBENS_VER_SCALE_REGIONAL=-0.5 ;; vertical localization correlation length (>0. grid units, <0. lnp)
READIN_LOCALIZATION=F ;; if T, then read in localization information from external file
GENERATE_ENS_REGIONAL=F ;; if T, generate ensemble perturbations internally as random samples of static B.
## integer, used to select type of ensemble to read in for regional application.
#
# =1: use GEFS internally interpolated to ensemble grid.
# =2: ensembles are WRF NMM format
# =3: ensembles are ARW netcdf format.
# =4: ensembles are NEMS NMMB format.
REGIONAL_ENSEMBLE_OPTION=1
PSEUDO_HYBENS=F ;; if T, use pseudo HWRF (NMM) ensemble
GRID_RATIO_ENS=1 ;; ratio of ensemble grid resolution to analysis grid resolution
MERGE_TWO_GRID_ENSPERTS=F ;; merge ensemble from two nests for HWRF (NMM)
PWGTFLG=F ;; if T, use vertical integration function on ensemble contribution of Psfc
HYBENS_ANISO_REGIONAL=F ;; if T, then use anisotropic recursive filter for localization
WRITE_ENS_SPRD=F ;; if T, write out ensemble spread

## Used to configure the hwrf_ensemble namelist for meanhx
[meanhx_ensemble_nml]
# Nothing needed here yet

## Used to configure the GSI namelist for enshx
[gsi_enshx_nml]
# Namelist settings for enshx for enkf domain (6km)
HZSCL=0.25,0.5,1.0 ;; background error scale factor for horizontal smoothing
DELTIM=1200 ;; model timestep used for assimilation of precipitation rates
twind=3.0 ;; maximum half time window (hours) for observations
HYBENS_REGIONAL=F ;; logical variable, if .true., then turn on hybrid ensemble option
ENSEMBLE_SIZE_REGIONAL=80 ;; ensemble size
HYBENS_UV_REGIONAL=T ;; if T, then ensemble perturbation wind stored as u,v. if F, streamfunction and velocity potential
BETA_S0=0.2 ;; value between 0 and 1, relative weight given to static background B
HYBENS_HOR_SCALE_REGIONAL=300 ;; horizontal localization correlation length (km)
HYBENS_VER_SCALE_REGIONAL=-0.5 ;; vertical localization correlation length (>0. grid units, <0. lnp)
READIN_LOCALIZATION=F ;; if T, then read in localization information from external file
GENERATE_ENS_REGIONAL=F ;; if T, generate ensemble perturbations internally as random samples of static B.
## integer, used to select type of ensemble to read in for regional application.
#
# =1: use GEFS internally interpolated to ensemble grid.
# =2: ensembles are WRF NMM format
# =3: ensembles are ARW netcdf format.
# =4: ensembles are NEMS NMMB format.
REGIONAL_ENSEMBLE_OPTION=1
PSEUDO_HYBENS=F ;; if T, use pseudo HWRF (NMM) ensemble
GRID_RATIO_ENS=1 ;; ratio of ensemble grid resolution to analysis grid resolution
MERGE_TWO_GRID_ENSPERTS=F ;; merge ensemble from two nests for HWRF (NMM)
PWGTFLG=F ;; if T, use vertical integration function on ensemble contribution of Psfc
HYBENS_ANISO_REGIONAL=F ;; if T, then use anisotropic recursive filter for localization
WRITE_ENS_SPRD=F ;; if T, write out ensemble spread

## Defines the hdob obstype
#
# Tells where to get hdob observations, and what to name them in the
# directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[hdob_obstype]
type=hd_ob ;; Name for this type of observation
dataset=hd_obs ;; dataset name for the hwrf.input
item=gdas1_bufr ;; item  name for the hwrf.input
# local dir name = gfs/gdas bufr_d name
hdobbufr=hdob

## Defines the old TDR obstype
#
# Tells where to get old TDR observations, and what to name them in
# the directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[tdr_old_obstype]
type=tdr_old ;; Name for this type of observation
dataset=tdrso ;; dataset name for the hwrf.input
item=tdrsonm ;; item  name for the hwrf.input
# local dir name = gfs/gdas bufr_d name
tldplrso=tldplr

## Defines the GIV-TDR obstype
#
# Tells where to get GIV-TDR observations, and what to name them in
# the directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[g4tdr_obstype]
type=g4tdr ;; Name for this type of observation
dataset=g4tdrso ;; dataset name for the hwrf.input
item=g4tdrsonm ;; item  name for the hwrf.input
# local dir name = gfs/gdas bufr_d name
tldplrso=tldplr

## Defines HIWRAP obstype, exactly same as the old TDR obstype
#
# Tells where to get HIWRAP observations, and what to name them in
# the directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[hiwrap_obstype]
type=hiwrap ;; Name for this type of observation
dataset=hiwrapso ;; dataset name for the hwrf.input
item=hiwrapsonm ;; item  name for the hwrf.input
# local dir name = gfs/gdas bufr_d name
tldplrso=tldplr

## Defines the new TDR obstype
#
# Tells where to get TDR observations, and what to name them in the
# directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[tdr_new_obstype]
type=tdr_new ;; Name for this type of observation 
dataset=tdr ;; dataset name for the hwrf.input
item=gdas1_bufr ;; item  name for the hwrf.input
# local dir name = gfs/gdas bufr_d name
tldplrbufr=tldplr

## Configures the hwrf.ensda.ensda_pre_object_for() which decides
## whether to run the ENSDA.
[ensda_pre]
# Check next cycle for TDR data
catalog = {input_catalog} ;; The section for configuring hwrf.input.DataCatalog
dataset=tdr ;; The dataset for hwrf.input to find TDR data
item=gdas1_bufr ;; The item for hwrf.input to find TDR data
obstype=tldplr ;; The obstype for hwrf.input to find TDR data
tdr_flag_file={com}/{stormlabel}.run_ensda ;; The path to the ENSDA flag file
tdr_flag_file2={com}/run_ensda.{vit[stnum]:02d}{vit[basin1lc]}.{cycle} ;; second ENSDA flag file
numofcheck=2 ;; Number of times to check for the TDR trigger file
checksecinv=300 ;; Time between checks of the TDR trigger file
run_ensda_wmax=17.0 ;; the maximum wind threshold used to turn on HWRF ensemble

## Defines the satellite wind obstype
[sat_wnd_obstype]
type=satwnd ;; Name of this group of obstypes.
dataset=gfs ;; Dataset for the hwrf.input to find data
item=gfs_bufr ;; Item for hwrf.input to find data
satwndbufr=satwnd ;; GFS/GDAS bufr_d name for satwndbufr data

## Defines the satellite radiance obstypes
#
# Tells where to get satellite observations, and what to name them in
# the directory when running GSI.  See
# hwrf.gsi.GSIBase.grab_obstype_section() for details.
[sat_radiance_obstypes]
type=satellite ;; Name of this group of obstypes.  MUST be satellite
dataset=gfs ;; Dataset for the hwrf.input to find data
item=gfs_bufr ;; Item for hwrf.input to find data
# local dir name = gfs/gdas bufr_d name
gpsrobufr=gpsro ;; GFS/GDAS bufr_d name for gpsrobufr data
gsndrbufr=goesnd ;; GFS/GDAS bufr_d name for GOES sounder radiance
gsnd1bufr=goesfv ;; GFS/GDAS bufr_d name for GOES 1x1 sounder radiance
hirs2bufr=1bhrs2 ;; GFS/GDAS bufr_d name for HIRS/2 radiance
amsuabufr=1bamua ;; GFS/GDAS bufr_d name for AMSU/A radiance
amsubbufr=1bamub ;; GFS/GDAS bufr_d name for AMSU/B radiance
hirs3bufr=1bhrs3 ;; GFS/GDAS bufr_d name for HIRS/3 radiance
hirs4bufr=1bhrs4 ;; GFS/GDAS bufr_d name for HIRS/4 radiance
mhsbufr=1bmhs ;; GFS/GDAS bufr_d name for MHS radiance
#mhsbufrears=esmhs ;; EARS/RARS MHS radiance file
#mhsbufr_db=mhsdb ;; direct broadcast MHS radiance file
airsbufr=airsev ;; GFS/GDAS bufr_d name for AIRS radiance
seviribufr=sevcsr ;; GFS/GDAS bufr_d name for SEVIRI radiance
iasibufr=mtiasi ;; GFS/GDAS bufr_d name for IASI radiance
#iasibufrears=esiasi ;; EARS/RARS IASI radiance file
#iasibufr_db=iasidb ;; direct broadcast IASI radiance file
amsuabufrears=esamua ;; GFS/GDAS bufr_d name for EARS AMSU/A radiance
#amsuabufr_db=amuadb ;; direct broadcast AMSU/A radiance file
amsubbufrears=esamub ;; GFS/GDAS bufr_d name for EARS AMSU/B radiance
#amsubbufr_db=amubdb ;; direct broadcast AMSU/B radiance file
hirs3bufrears=eshrs3 ;; GFS/GDAS bufr_d name for EARS HIRS/3 radiance
#hirs3bufr_db=hrs3db ;; direct broadcast HIRS/3 radiance file
ssmitbufr=ssmit ;; GFS/GDAS bufr_d name for SSMI radiance
amsrebufr=amsre ;; GFS/GDAS bufr_d name for AMSRE radiance
ssmisbufr=ssmisu ;; GFS/GDAS bufr_d name for SSMIS radiance
atmsbufr=atms ;; GFS/GDAS bufr_d name for ATMS radiance
#atmsbufrears=esatms ;; EARS/RARS ATMS radiance file
#atmsbufr_db=atmsdb ;; direct broadcast ATMS radiance file
crisbufr=cris ;; GFS/GDAS bufr_d name for CRIS radiance
#crisbufrears=escris ;; EARS/RARS CRIS radiance file 
#crisbufr_db=crisdb ;; direct broadcast CRIS radiance file
#crisfsbufr=crisfs ;; GFS/GDAS CRIS on SNPP radiance
#crisfsbufrears=escrisfs ;; GFS/GDAS EARS/RARS CRIS-FSR radiance file
#crisfsbufr_db=crisfsdb ;; GFS/GDAS direct broadcast CRIS-FSR radiance file
#avhambufr=avcsam ;; GFS/GDAS bufr_d name for AVHRR GAC (AM) data
#avhpmbufr=avcspm ;; GFS/GDAS bufr_d name for AVHRR GAC (PM) data
#ahibufr=ahi ;; GFS/GDAS bufr_d name for AHI radiance
#amsr2bufr=amsr2 ;; GFS/GDAS bufr_d name for AMSR2 L1B brightness temperature
saphirbufr=saphir ;; GFS/GDAS bufr_d name for SAPHIR radiance
gmibufr=gmi ;; GMI L1CR brightness temperature
#sbuvbufr=osbuv8 ;; NOAA POES SBUV ozone retrieval file
#mlsbufr=mls ;; MLS ozone retrieval file
#gomebufr=gome ;; GOME ozone retrieval file
#omibufr=omi ;; OMI ozone retrieval file
#ssmirrbufr=spssmip ;; SSM/I precipitation rate
#tmirrbufr=sptrmm ;; TMI precipitation rate

# -----------------------------------------------------------------------
# EnKF configuration.
# -----------------------------------------------------------------------

# Used to configure EnKF
[enkf]
catalog = {input_catalog} ;; catalog for the hwrf.input.DataCatalog
nml_file={PARMhwrf}/hwrf_enkf.nml ;; EnKF name list input file
nml_section = enkf_nml ;; Section used to configure the EnKF namelist file
ensemble_nml_file={PARMhwrf}/hwrf_ensemble.nml ;; wrf-ensemble namelist input file
ensemble_nml_section=enkf_ensemble_nml ;; Section used to configure the hwrf_ensemble namelist file
interpolate_nml_section=recenter_interpolate_nml ;; Section used to configure the hwrf_interpolate namelist file
interpolate_nml_file={PARMhwrf}/hwrf_interpolate.nml ;; hwrf_interpolate namelist
## The name of the enkf status file in the com directory:
enkfstatus={com}/{stormlabel}.enkf_status
## Second enkf file name that mimics the old operational ocean status file.
#
# This second status file uses a filename that cannot be predicted in
# advance.  That breaks ecFlow and Rocoto.  The file is still
# created for backward compatibility.
enkfstatus2={com}/enkf_status.{vit[stormname]}{vit[stnum]:02d}{vit[basin1lc]}.{cycle}
diagpre={com}/{out_prefix}.enkf ;; Filename prefix for EnKF output


# Used to configure EnKF namelist
[enkf_nml]
# Nothing needed here yet.
#
# Used to configure hwrf_ensemble namelist
[enkf_ensemble_nml]
# Nothing needed here yet.
#
[recenter_interpolate_nml]
INTRP_FROM_FILENAME=wrfghost_d02 ;; interpolated from filename
INTRP_TO_FILENAME=enkf_anl_mean ;; interpolated to filename
IBLEND=1 ;; blending mode, blend around the domain boundary

[enkfmerge]
interpolate_nml_section=enkfmerge_interpolate_nml ;; Section used to configure the hwrf_interpolate namelist file
interpolate_nml_file={PARMhwrf}/hwrf_interpolate.nml ;; hwrf_interpolate namelist

#
[enkfmerge_interpolate_nml]
INTRP_FROM_FILENAME=hwrf_enkf_anl ;; interpolated from filename
INTRP_TO_FILENAME=wrfanl_d02 ;; interpolated to filename
IBLEND=2 ;; blending mode, blend around vortex

# -----------------------------------------------------------------------
# POST configuration.
# -----------------------------------------------------------------------

## Wavewatch3 post-processing configuration.
[ww3post]
ww3_grib_post=yes        ;; Produce WW3 grid output in grib2 format
ww3_ounf_post=yes        ;; Produce WW3 grid output in netcdf format
ww3_ounp_spec_post=yes   ;; Produce WW3 point spectral output in netcdf format
ww3_outp_bull_post=yes   ;; Produce WW3 point output in bulletin format
ww3_outp_spec_post=yes   ;; Produce WW3 point output in spectral format

## Configure forecast output products
#
# Configures output of the forecast, post, tracker, and other forecast
# and post-processing tasks.  This configures both delivery to COM and
# the direct output of the programs.  See the hwrf.hwrfsystem for
# details on most of these options.
[forecast_products]
#ww3_restart_start=6   ;; - not yet implemented
#ww3_restart_end=18    ;; - not yet implemented
ww3_output_step=3600   ;; Seconds between WW3 main output file times
ww3_pntout_step=3600   ;; Seconds between WW3 point output file times
ww3_restart_step=21600  ;; Seconds between WW3 restart file times

## WRF forecast output frequency in seconds
#
# Output frequency of the main WRF output stream, in integer seconds
# greater than 0.  Valid values are 10800, 3600 or anything that
# integer divides 3600.  In the special case of 10800, the output will
# still be hourly from hours 0-9.  Examples:
#
# Value | Output Hours 0-9           | Output After Hour 9
# ----- | -------------------------- | ----------------------
# 10800 | Every 3600 Seconds (1hr)   | 10800 seconds (3hrs)
# 3600  | Every 3600 Seconds (1hr)   | 3600 seconds (1hr)
# 1800  | Every 1800 Seconds (30min) | 1800 seconds (30 min)
# 900   | Every 900 Seconds (15 min) | 900 seconds (15 min)
wrf_output_step=10800

## POM forecast output frequency in seconds
#
# Output frequency of the POM model in seconds.  Must be a multiple of
# 5400 (1.5 hours), and must integer divide a day (86400).  Typical
# values:
#
# + 86400 (a day)
# + 21600 (every 6 hours)
# + 10800 (every 3 hours)
# + 5400 (every 1.5 hours)
pom_output_step=21600

# Post-processing start, end and step for various components:
tracker_step=1        ;; Times between tracker inputs (hours)
nonsatpost_step=1     ;; Times between non-satellite post inputs (hours)
satpost_step=6        ;; Times between satellite post inputs (hours)
wrfcopier_start=0     ;; First time to copy WRF output to COM (hours)
wrfcopier_end=9       ;; Last time to copy WRF output to COM (hours)
wrfcopier_step=3      ;; Step between times WRF output is copied to COM (hours)
combinetrack_fhr=12   ;; Length of the relocation-length track file

# Settings for GRIB1 grid 255 for each grid:
core_grid=0.02,0.02,10,10,128,501,501            ;; moving 10x10 0.02 deg grid
storm_grid=0.02,0.02,25,25,128,1251,1251         ;; moving 25x25 0.02 deg grid
trkd3_grid=0.02,0.02,25,25,128,1251,1251         ;; moving 25x25 0.02 deg grid for tracker
trkd2_grid=0.05,0.05,20,20,128,400,400           ;; moving 20x20 0.05 deg grid for tracker
trkd1_grid=0.20,0.20,20,20,128,100,100           ;; moving 20x20 0.20 deg grid for tracker
synop_grid=0.125,0.125,90.,120.,128,961,721      ;; stationary 90x120 deg grid


## GRIB2 compression method
#
# Settings for cnvgrib to convert to GRIB2.  Supported options:
#  + 32 --- complex packing with second-order differences
#  + 40 --- "lossless" jpeg 2000
#
# These are sent to the -p  option to cnvgrib and satgrib2.
grib2_compression=32

# Output filenames:
hwrftrk%com={out_prefix}.hwrftrk.f{fahr:03d}.grb ;; track input filename in COM
hwrftrk%intercom={out_prefix}.hwrftrk.grbf{fahr:02d} ;; track input filename in intercom

hwrftrkd02%com={out_prefix}.hwrftrkd02.f{fahr:03d}.grb ;; domain 1+2 track input filename in COM
hwrftrkd02%intercom={out_prefix}.hwrftrkd02.grbf{fahr:02d} ;; domain 1+2 track input filename in intercom

hwrftrkd01%com={out_prefix}.hwrftrkd01.f{fahr:03d}.grb ;; domain 1 track input filename in COM
hwrftrkd01%intercom={out_prefix}.hwrftrkd01.grbf{fahr:02d} ;; domain 1 track input filename in intercom

anl_outer={out_prefix}.wrfanl_d02 ;; wrfanl d02 file
anl_inner={out_prefix}.wrfanl_d03 ;; wrfanl d03 file

p123_core%com={out_prefix}.hwrfprs.core.0p02.f{fahr:03d}.grb2  ;; 10x10 GRIB2 non-sat
p123_storm%com={out_prefix}.hwrfprs.storm.0p02.f{fahr:03d}.grb2  ;; 25x25 GRIB2 non-sat
p123_synop%com={out_prefix}.hwrfprs.synoptic.0p125.f{fahr:03d}.grb2  ;; 90x120 GRIB2 non-sat
p123_global%com={out_prefix}.hwrfprs.global.0p25.f{fahr:03d}.grb2   ;; global GRIB2 non-sat

s123_core%com={out_prefix}.hwrfsat.core.0p02.f{fahr:03d}.grb2  ;; 10x10 GRIB2 sat
s123_storm%com={out_prefix}.hwrfsat.storm.0p02.f{fahr:03d}.grb2  ;; 25x25 GRIB2 sat
s123_synop%com={out_prefix}.hwrfsat.synoptic.0p125.f{fahr:03d}.grb2  ;; 90x120 GRIB2 sat
s123_global%com={out_prefix}.hwrfsat.global.0p25.f{fahr:03d}.grb2   ;; global GRIB2 sat

## Configures the GSI post processor (hwrf.gsipost) output products
[gsi_products]
# Settings for GRIB1 grid 255 for each grid:
d3_grid=0.02,0.02,12.,12.,128,600,600   ;; Domain 3 grid
d2_grid=0.06,0.06,30.,30.,128,500,500   ;; Domain 2 grid

## GRIB2 compression method
#
# Settings for cnvgrib to convert to GRIB2.  Supported options:
#  + 32 --- complex packing with second-order differences
#  + 40 --- "lossless" jpeg 2000
#
# These are sent to the -p  option to cnvgrib and satgrib2.
grib2_compression=32  ; complex packing with second-order differences

# Delivery settings:
hwrforg_n%com={out_prefix}.hwrforg_n.grb2f00  ;; Inner domain original data in com
hwrforg_i%com={out_prefix}.hwrforg_i.grb2f00  ;; Intermediate domain original data in com
hwrfges_n%com={out_prefix}.hwrfges_n.grb2f00  ;; Inner domain first guess data in com
hwrfges_i%com={out_prefix}.hwrfges_i.grb2f00  ;; Intermediate domain first guess data in com
hwrfanl_n%com={out_prefix}.hwrfanl_n.grb2f00  ;; Inner domain analysis data in com
hwrfanl_i%com={out_prefix}.hwrfanl_i.grb2f00  ;; Intermediate domain analysis data in com

## Configures the WRF copier (hwrf.copywrf)
[copywrf]
# Nothing needed here now

## Configures the non-satellite post-processor run on the forecast
[nonsatpost]
## Post control file for auxhist2 files (hours 1, 2, 4, 5, 7, 8)
auxhist2_control={PARMhwrf}/post/hwrf_cntrl.tracker
## Post control file for all other files
control={PARMhwrf}/post/hwrf_cntrl.nonsat

## Configures the GSI post-processor run of the hwrf.post
[gsipost]
## Post control file
control={PARMhwrf}/post/hwrf_cntrl.nonsat
needcrtm=no  ;; no=do not link CRTM fix files

## Configure the hwrf.gribtask run by the GSI post
[gsigribber]
# Configure the regribber/gribtask for the GSI.  Should be identical
# to the [regribber].
@inc=regribber

## Configures the satellite post-processor run on the forecast
[satpost]
control={PARMhwrf}/post/hwrf_cntrl.sat{basin1} ;; Post control file

## Configures the hwrf.gribtask
[regribber]
griblockdir={lockdir}/griblocks       ;; Regribber lock directory
hgt_levs={PARMhwrf}/hwrf_hgt_levs.txt ;; Tracker height level list file
tmp_levs={PARMhwrf}/hwrf_tmp_levs.txt ;; Tracker temperature level list file

## Configures the main forecast job tracker
[tracker]
namelist=trackernml ;; name of the section that generates the tracker namelist

## Configures the tracker run on only domain 1 & 2
[trackerd02]
namelist=trackernml,nothermo ;; name of the sections for generating the tracker namelist

## Configures the tracker run on only domain 1
[trackerd01]
namelist=trackernml,nothermo ;; name of the sections for generating the tracker namelist

## Tracker namelist overrides to disable thermodynamic parameters.  
#
# Sent to hwrf.namelist.Conf2Namelist when generating the tracker
# namelist for the experimental domain 1 and domain 1/2 trackers.
# This section disables the tracker thermodynamic parameters, which
# are too slow for the larger domain size.
[nothermo]
# tracker namelist overrides to disable thermodynamics parameters:
phaseinfo.phaseflag='n'
structinfo.structflag='n'
structinfo.ikeflag='n'

## Default tracker namelist settings.
#
# Sent into hwrf.namelist.Conf2Namelist to generate the tracker
# namelist.  See the GFDL Vortex Tracker documentation for details.
#
# @note The domain 1 and domain 1/2 trackers also send nothermo to
# disable the thermodynamic parameters.
[trackernml]
# tracker namelist settings:
datein.inp%model=17
fnameinfo.gmodname="hwrf"
fnameinfo.rundescr="25x25"
atcfinfo.atcfnum=81
atcfinfo.atcfname='HWRF'
trackerinfo.trkrinfo%mslpthresh=0.0015
trackerinfo.trkrinfo%v850thresh=1.5000
trackerinfo.trkrinfo%contint=100.0
phaseinfo.wcore_depth=1.0
phaseinfo.phasescheme='both'
waitinfo.wait_max_wait=3600

## Configuration for hwrf_nhc_products program, which produces the
# swath, the HTCF, the AFOS, and various other NHC products.
[nhc_products]
TierI_model=HWRF    ;; model name in Tier I file
TierI_submodel=PYHW ;; submodel name in Tier I file
TierI_realtime=no   ;; realtime/non-realtime for Tier I purposes
grads_byteswap=yes  ;; should the grads ctl file say the swath is byteswapped?
swathres=0.05  ;; resolution of storm swath in degrees
swathpad=0.3   ;; padding in degrees around storm swath

# -----------------------------------------------------------------------
# POM configuration.
# -----------------------------------------------------------------------

## Configures the MPIPOMTC ocean model
[pom]
catalog = {input_catalog} ;; catalog for the hwrf.input.DataCatalog
 
sfc_dataset=gfs           ;; Dataset for hwrf.input to find GFS surface data
sanl_item=gfs_sanl        ;; Item for hwrf.input to find GFS sanl files
sfcanl_item=gfs_sfcanl    ;; Item for hwrf.input to find GFS sfcanl files
gribA_item=gfs_gribA      ;; Item for hwrf.input to find grib2 format GFS file containing initial SST
gribA_hires_item=gfs_gribA_highres  ;; Item for hwrf.input to find hires_grib2 format GFS file containing initial SST

## Get SST from GFS GRIB2, Spectral, or NEMSIO format output files
gfssst_type=2             ;; 1=grib2, 2=nemsio, 3=spectral

loop_dataset=loopdata     ;; Loop current dataset for hwrf.input
loop_item=gfdl_loop       ;; Item for finding GFDL loop current files
wc_ring_item=gfdl_wc_ring ;; Item for finding GFDL loop wc_ring files

ini_data=gdem             ;; NCODA (ncoda), GDEM(gdem), GDEM3(gdem3, AL only), LEVIT(levit, AL only), RTOF(rtof) based initialization
rt00zonly=0               ;; If ini_data=rtof, then this switch controls whether or not to use 00Z RTOFS data only: yes (1), no (0)
assi_sst=1                ;; Assimilate SST (1), Do not assimilate SST(0)
oned_pom=0                ;; 1D pom (1), 3D pom(0)
geovflag=1                ;; Initial geostrophic velocity (1), No calculation of initial geostrophic velocity (0)
i270flag=1                ;; Change isplit to 45 (1) ; change isplit to 50 (2) ; else keep isplit set to 48
fplnflag=0                ;; Use fplane (1), otherwise (0)
cntrhval=22.4             ;; The latitude at which the f-plane grid is calculated if fplnflag is 1
ph2xflag=0                ;; Stop after phase1 (1), run phase 2 extended to ${ph2xdays} (x), else keep days set to 3.0 for phase 2
ph2xdays=8.0              ;; Number of days when ph2xflag is x
pwavflag=0                ;; Wave-induced mixing(1), otherwise(0)
poutflag=0                ;; Change prtd1 to ${prtd1val} (1), else keep prtd1 set to 1.0
prtd1val=1.0              ;; Alternative value for prtd1, used only when poutflag is 1
swchflag=0                ;; Change prtd2 to ${prtd2val} and swtch to ${swtchval} (1), else keep prtd2 set to 1.0 and swtch set to 9999.
swtchval=8.0              ;; Alternative value for swtch, used only when swchflag is 1
prtd2val=0.01             ;; Alternative value for prtd2, used only when swchflag is 1

kppflag=0                 ;; Options to control vertical mixing schemes: a. POM default M-Y scheme (kppflag=0)
kpp_ric=0.36              ;;                                             b. Standard KPP scheme (kppflag=1,kpp_lt_log=.false.,kpp_ric=0.36)
kpp_lt_log=.false.        ;;                                             c. KPP scheme with explicit Langmuir mixing (kppflag=1,kpp_lt_log=.true.,kpp_ric=0.235)

# ----------------------------------------------------------------------
# HYCOM configuration.
# ----------------------------------------------------------------------

## hycom_init job configuration:
[hycominit]
catalog={input_catalog}

RTOFS_HIST=/dev/null  ;; RTOFS .a and .b file locations of historical RTOFS
RTOFS_FCST=/dev/null  ;; RTOFS .a and .b file locations of real-time RTOFS
RTOFS_TAR=/dev/null   ;; RTOFS .a.tgz and .b file locations
RTOFS_STAGE={WORKhwrf}/hycominit/RTOFSDIR ;; RTOFS staging/linking area

scrub=no              ;; Override global scrubbing option for hycom init

bools=hycombools      ;; Section with YES/NO variables for shell programs
strings=hycomstrings  ;; Section with string variables for shell programs

## hycom forecast program for the coupled forecast job
forecast_exe={EXEChwrf}/hwrf_{RUNmodIDout}_forecast

## Output spin restart R files; should contain RUNmodIDout and ab vars
restart_outR={com}/{out_prefix}.{RUNmodIDout}.spin_restart.{ab}
## Output restart files; should contain RUNmodIDout and ab vars
restart_out={com}/{out_prefix}.{RUNmodIDout}.restart.{ab}
## Output spin files; should contain RUNmodIDout and ab vars
spin_archv={com}/{out_prefix}.spin_archv.{ab}

## Enable river adjustment in HYCOM initialization?
adjust_river=0

## Enable temperature adjustment in HYCOM initialization?
adjust_temp=0

## Interval in hours between forcing in the 126hr forecast mode
forecast_forcing_interval=3

## Adjust wind near hurricane in HYCOM initialization?
parameterized_winds=0

## Number of hycom processors
hycom_procs=90

atmos1_dataset=gdas1      ;; Dataset for global atmospheric surface data before time 0
atmos1_flux=gdas1_sfluxgrb ;; Item for atmospheric flux data before time 0
atmos1_grid=gdas1_gribA   ;; Item for atmospheric air data before time 0
atmos2_dataset=gfs        ;; Dataset for global atmospheric surface data after time 0
atmos2_flux=gfs_sfluxgrb     ;; Item for atmospheric flux data after time 0 - using pgrb2 from 2017 (hsk)
atmos2_grid=gfs_gribA     ;; Item for atmospheric air data after time 0
ocean_dataset=rtofs       ;; Dataset for global ocean archv data
ocean_fcst=rtofs_fcst     ;; Item for ocean data after analysis time
ocean_past=rtofs_past     ;; Item for ocean data before the analysis time
ocean_now=rtofs_now       ;; Item for ocean data at the analysis time
ocean_rst=rtofs_rst       ;; Item for ocean restart files
ocean_dataset_stage=rtofsstage     ;; Dataset for global ocean archv data (staged)
ocean_fcst_name=rtofs_fcst_name     ;; Item for ocean data after analysis time
ocean_past_name=rtofs_past_name     ;; Item for ocean data before the analysis time
ocean_now_name=rtofs_now_name       ;; Item for ocean data at the analysis time
ocean_rst_name=rtofs_rst_name       ;; Item for ocean restart files

## Configuration for the ocean_post job
[hycompost]
bools=hycombools     ;; Section with YES/NO variables for shell programs
strings=hycomstrings ;; Section with string variables for shell programs

## This section contains YES/NO variables and values which will be set
## as environment variables in the shell programs.
#
# The hycombools section is filled with ENV_VAR=value entries.  Each
# ENV_VAR will be set to the boolean version of the value.  Values are
# converted to Python booleans and then to the strings YES or NO.  All
# environment variables must be valid shell and Python variable names.
[hycombools]
RUN_OCEAN={run_ocean}
RUN_WAVE={run_wave}

## This section contains variables and string values which will be set
## as environment variables in the shell programs.
#
# The hycomstrings section is filled with ENV_VAR=value entries.  The
# ENV_VAR will be set to the value, unmodified.  All environment
# variables must be valid shell and Python variable names.  The
# strings must be single line strings (no end-of-line characters) that
# are expressable in both shell and Python.  
#
# @note The special RTOFSDIR variable is set independently based on
# the hwrf.hycom.HYCOMInit.find_rtofs_data() function.
[hycomstrings]
NPROCS_o=90
out_prefix={config/out_prefix}
out_prefix_nodate={config/out_prefix_nodate}
FORECAST_DIR={dir/WORKhwrf}/runwrf
OCEAN_MODEL={ocean_model}
WAVE_MODEL={wave_model}
YMDH={config/YMDH}
stormlabel={config/stormlabel}
STORM={vit[stormname]}
STID={vit[stormid3]}
basin2={vit[pubbasin2]}
USHhwrf={dir/USHhwrf}
FIXhwrf={dir/FIXhwrf}
EXEChwrf={dir/EXEChwrf}
COMhwrf={config/com}
WORKhwrf={dir/WORKhwrf}
CASE_ROOT={config/case_root}

# -----------------------------------------------------------------------
# WRF configuration.
# -----------------------------------------------------------------------

# This configuration file does not set start/end times, I/O
# configuration, locations or task geometry.  All of that is set in
# the Python code.  However, everything else is set here.  In each
# section, the special variable "namelist" tells the parser to recurse
# into a list of conf sections to get more namelist variables.
# Sections later in the list will be parsed first.  Any variable will
# only be set once: the first time it is seen.

## Set a few variables that the Python code must be aware of to
# communicate correctly between HWRF components.  These will also be
# used to calculate or set the appropriate WPS and WRF namelist values:
[wrf]
dt = 30              ;; Timestep (seconds, a rational number)
bdystep = 21600      ;; Boundary input timestep (integer)
ibdystep = 21600     ;; Boundary input timestep initialization (integer)
ptsgm = 15000        ;; Pressure-sigma transition level (real >0)
ptop = 1000          ;; Model top pressure (real >0)
prep_hybrid=.true.   ;; Should prep_hybrid be used? (Fortran logical)
#---metgrid_soil_levels=2 

## The default WRF io_form, which can be overridden.
#
# This is the default IO form for most files, and is copied to the WRF
# namelist automatically.
# @note The auxinput1/2 streams are set in [wrf_namelist]
io_form=11 

namelist = wrf_namelist ;; Set the WRF namelist values that are NOT per domain:

## This section is read by all scripts that run the wrf or nmm_real.
[wrfexe]
# Fix file locations:
fix.eta_lookup={FIXhwrf}/hwrf_eta_micro_lookup.dat  ;; eta_micro_lookup.dat file location
fix.track={FIXhwrf}/hwrf_track                      ;; "track" file location
fix.wrf_other={FIXhwrf}/hwrf-wrf/*                  ;; glob for finding other WRF fix files
cpl_nml=cpl_nml                                     ;; coupler namelist file
dt_c=360                                            ;; coupler timestep in seconds
sleeptime=30                                        ;; sleep time between checks of child process

## This section is used to run the one minute wrfanl simulation to
#  provide inputs to the six hour forecast ensemble.  Its main purpose
#  is to disable I/O servers.
[enswrfexe]
@inc=wrfexe
nio_groups=1          ;; Number of WRF I/O server groups per domain in init jobs
nio_tasks_per_group=0,0,0 ;; Number of I/O servers per group in init jobs
poll_servers=yes      ;; Turn on server polling in init jobs if quilt servers are used (They are not.)
nproc_x=-1            ;; Init job WRF processor count in X direction (-1 = automatic)
nproc_y=-1            ;; Init job WRF processor count in Y direction (-1 = automatic)

## This section is read by the script that runs the WRF forecast job at 2km.
# It is not used by other executions of wrf.  See runwrf3km in hwrf_3km.conf for the 3km 
# configuration.
[runwrf]
sleeptime=60 ;; sleep time between checks of child processes
@inc=wrfexe
wm3c_ranks=4 ;; Number of coupler ranks
ww3_ranks=120 ;; number of wavewatch3 ranks
ocean_ranks=9 ;; number of POM ranks (must be 9)
coupled_log={WORKhwrf}/cpl.out ;; coupled forecast stdout location

## Configures the coupler namelist
#
# Read by hwrf.namelist.Conf2Namelist to generate the coupler
# namelist.  See the NCEP Coupler documentation for details.
[cpl_nml]
CPL_SETTINGS.restart=F
CPL_SETTINGS.cstepmax={cstepmax}
CPL_SETTINGS.dt_c={dt_c}
CPL_SETTINGS.INTP_INI_STUCK_OK=T
CPL_SETTINGS.VerbLev=1
CPL_SETTINGS.ia2o=1
CPL_SETTINGS.io2a=1
CPL_SETTINGS.ia2w=1
CPL_SETTINGS.iw2a=0
CPL_SETTINGS.io2w=0
CPL_SETTINGS.iw2o=0

## This section sets WRF namelist variables that are NOT on a
# per-domain basis.
#
# Sent to hwrf.namelist.Conf2Namelist to generate the WRF namelist
# settings for options that are NOT set on a per-domain basis.  See
# the WRF documentation for details.
# 
# @note Any settings specific to timesteps or output files are
# generally ignored, and should be set in [forecast_products] or the
# hwrf_expt module instead.
[wrf_namelist]

# Force NetCDF for geogrid and metgrid because PNetCDF is not supported
time_control.io_form_auxinput1=2
time_control.io_form_auxinput2=2

time_control.debug_level = 1
#domains.halo_debug = 3
physics.var_ric = 1.0
physics.coef_ric_l = 0.16
physics.coef_ric_s = 0.25
physics.co2tf = 1
physics.num_soil_layers = 4
dynamics.euler_adv = .False.
bdy_control.spec_bdy_width = 1
bdy_control.specified = .true.
domains.feedback = 1
domains.num_moves = -99
physics.pert_sas=.false.
physics.pert_pbl=.false.
physics.pert_Cd=.false.
physics.ens_pblamp=0.2
physics.ens_sasamp=50.0
physics.ens_Cdamp=0.2
physics.ens_random_seed={ENS}
domains.eta_levels=  1.000000,  0.997622,  0.995078,  0.992240,  0.989036,  0.985440,  0.981451,  0.977061,  0.972249,  0.966994,  0.961280,  0.955106,  0.948462,  0.941306,  0.933562,  0.925134,  0.915937,  0.905890,  0.894913,  0.882926,  0.869842,  0.855646,  0.840183,  0.823383,  0.805217,  0.785767,  0.765100,  0.743200,  0.720133,  0.695967,  0.670867,  0.645033,  0.618700,  0.592067,  0.565333,  0.538733,  0.512500,  0.486800,  0.461767,  0.437533,  0.414200,  0.391767,  0.370233,  0.349600,  0.329867,  0.310967,  0.292867,  0.275533,  0.258933,  0.243000,  0.227700,  0.213000,  0.198867,  0.185300,  0.172267,  0.159733,  0.147633,  0.135967,  0.124767,  0.114033,  0.103733,  0.093867,  0.084400,  0.075333,  0.066600,  0.058267,  0.050333,  0.042833,  0.035733,  0.029000,  0.022600,  0.016500,  0.010733,  0.005267,  0.000000,
logging.compute_tasks_silent = .true.
logging.io_servers_silent = .true.
logging.stderr_logging = 0
auxhist1_outname="wrfdiag_d<domain>"
auxhist2_outname="wrfout_d<domain>_<date>"
auxhist3_outname="wrfout_d<domain>_<date>"
physics.tg_option=1
physics.icloud=3
physics.cldovrlp=2
dynamics.terrain_smoothing = 2
time_control.tg_reset_stream=1

# Disable upscale feedback smoother, which is totally unused in HWRF
# anyway.  This removes an expensive halo:
domains.smooth_option=0

# Disable I/O server polling since it isn't supported for quilt_pnc
# yet.  This is not needed; quilt_pnc automatically disables polling
# anyway, but this removes a warning message that is confusing people:
namelist_quilt%poll_servers = .false.


## Configure the outermost WRF domain
#
# Mother Of All Domains (MOAD) in WRF terminology is the fixed,
# outermost domain in the simulation.  For all HWRF simulations (anl,
# ghost, forecast, ensda, etc.) the MOAD is the same, and this section
# sets all settings for that domain.  The hwrf.wrf.WRFNamelist class
# copies the MOAD settings to the child domain unless the child
# overrides them (with a few exceptions like parent_grid_ratio), so
# these sections affect all domains in all WRF simulations.
[moad]
nx = 288              ;; Domain X (rotated East) extent
ny = 576              ;; Domain Y (rotated North) extent
parent_grid_ratio = 1 ;; Must be 1 (ratio of self to self)
dx = 0.135            ;; X resolution, must be a multiple of 0.09 and same as dy
dy = 0.135            ;; Y resolution, must be a multiple of 0.09 and same as dx
start = moad          ;; Domain start location.  Must be moad
namelist = moad_namelist ;; Namelist for setting per-domain options for this domain.

## Configure the intermediate forecast and analysis domains in WRF.
[storm1outer]
nx = 265                  ;; Domain X (rotated East) extent
ny = 532                  ;; Domain Y (rotated North) extent
parent_grid_ratio = 3     ;; Parent-nest resolution ratio.  Must be 3

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = auto

namelist = namelist_outer ;; Namelist for setting per-domain options for this domain.

## Configure the innermost analysis domains in WRF.
[storm1inner]
nx = 235                  ;; Domain X (rotated East) extent
ny = 472                  ;; Domain Y (rotated North) extent
parent_grid_ratio = 3     ;; Parent-nest resolution ratio.  Must be 3

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = centered
istart = 27               ;; Ignored.
jstart = 58               ;; Ignored.
namelist = namelist_inner ;; Namelist for setting per-domain options for this domain.

## Configure the innermost ghost domains in WRF used for FGAT.
[storm1ghost]
nx = 500                  ;; Domain X (rotated East) extent
ny = 1000                 ;; Domain Y (rotated North) extent

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = centered
parent_grid_ratio = 3     ;; Parent-nest resolution ratio.  Must be 3
istart = 28               ;; Ignored.
jstart = 54               ;; Ignored

## List of sections for setting per-domain options for this domain.
namelist = namelist_inner,namelist_ghost

## Configure the intermediate ghost domains in WRF used for FGAT.
[storm1ghost_parent]
nx = 316                 ;; Domain X (rotated East) extent
ny = 630                 ;; Domain Y (rotated North) extent
parent_grid_ratio = 3    ;; Parent-nest resolution ratio.  Must be 3

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = auto

istart = 38              ;; Ignored.
jstart = 81              ;; Ignored.

## List of sections for setting per-domain options for this domain.
namelist = namelist_outer,namelist_ghost

## Configure the innermost ghost domains in WRF used for non-FGAT.
[storm1ghost_big]
nx = 529              ;; Domain X (rotated East) extent
ny = 998              ;; Domain Y (rotated North) extent

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = centered

istart = 36           ;; Ignored.
jstart = 59           ;; Ignored.
parent_grid_ratio = 3 ;; Parent-nest resolution ratio.  Must be 3

## List of sections for setting per-domain options for this domain.
namelist = namelist_inner,namelist_ghost

[storm1ghost_parent_big]
nx = 280              ;; Domain X (rotated East) extent
ny = 546              ;; Domain Y (rotated North) extent
parent_grid_ratio = 3 ;; Parent-nest resolution ratio.  Must be 3

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = auto
istart = 101          ;; Ignored.
jstart = 206          ;; Ignored.

## List of sections for setting per-domain options for this domain.
namelist = namelist_outer,namelist_ghost

## Extra per-domain WRF namelist settings for ghost domains, sent to
## hwrf.namelist.Conf2Namelist when generating the WRF namelist.
[namelist_ghost]
# Nothing needed here yet.

## Extra per-domain WRF namelist settings for all nests.
# 
# This section sets the namelist for the intermediate and innermost
# domains.  It only needs to set values that differ from the MOAD.
# The innermost domains can override these values in namelist_inner.
# See the WRF documentation for details.
[namelist_outer]
physics.nrads = 90
physics.nradl = 90
physics.nphs = 6
physics.ncnvc = 6
physics.gwd_opt = 0
physics.ntrack = 6
physics.sas_pgcon = 0.2
physics.nomove_freq = 3.0
dynamics.coac = 1.0
physics.ntornado=6
domains.corral_y=18
domains.corral_x=9

## Extra per-domain WRF namelist settings for all nests.
# 
# Sent into the hwrf.namelist.Conf2Namelist when generating the WRF
# namelist.  This section sets per-domain settings for the innermost
# domains.  It overrides settings from namelist_outer and
# moad_namelist.  See the WRF documentation for details.
[namelist_inner]
# This section sets the namelist for the 2km domains.  It only needs
# to set values that differ from the 6km domains.
physics.nrads = 270
physics.nradl = 270
physics.ntrack = 18
physics.vortex_tracker = 7
dynamics.coac = 1.2
physics.ntornado=18
domains.corral_y=18
domains.corral_x=9

## Extra per-domain WRF namelist settings for all nests.
# 
# Sent into the hwrf.namelist.Conf2Namelist when generating the WRF
# namelist.  This section sets per-domain settings for the Mother Of
# All Domains (moad), which is the outermost domain.  The settings in
# this section will be copied to all domains unless the namelist_outer
# or namelist_inner override them.  See the WRF documentation for
# details.
[moad_namelist]
dynamics.dwdt_damping_lev = 2000.
physics.mp_physics = 5
physics.ra_lw_physics = 4
physics.ra_sw_physics = 4
physics.sf_sfclay_physics = 88
physics.sf_surface_physics = 2
physics.bl_pbl_physics = 3
physics.cu_physics = 4 
physics.mommix = 1.0
physics.h_diff = 1.0
physics.gwd_opt = 2
physics.sfenth = 0.0
physics.nrads = 30
physics.nradl = 30
physics.nphs = 2
physics.ncnvc = 2
physics.gfs_alpha = -1.0
physics.sas_pgcon = 0.55
physics.vortex_tracker = 2
physics.ntrack = 6
physics.ntornado = 2
physics.sas_mass_flux=0.5
dynamics.non_hydrostatic = .true.
dynamics.wp = 0
dynamics.coac = 0.75
dynamics.codamp = 6.4
physics.nomove_freq=0

physics.icoef_sf=6
physics.lcurr_sf=.false.
physics.iwavecpl=0

domains.corral_y=18
domains.corral_x=9

# ----------------------------------------------------------------------
# HWRF Ensemble DA Settings
# ----------------------------------------------------------------------

## Configures the execution of the 6 hour ENSDA forecast
[enswrf]
namelist=wrf    ;; The section for setting up the WRF namelist

## Configures the domain used for ENSDA, about 30x30 degrees.
[ensdadom]
ny = 702        ;; Domain Y (rotated North) extent
nx = 350        ;; Domain X (rotated East) extent

## Domain initial location decision method
#
# How to decide the domain initial location:
#  - auto --- use hwrf_swcorner_dynamic.  Only immediate children of the MOAD can do this.
#  - centered --- center on the parent domain
#  - fixed --- specify a location using istart= and jstart= options
# If auto or centered are selected, the istart and jstart options are ignored.
start = auto
parent_grid_ratio = 3 ;; Parent-nest resolution ratio.  Must be 3

## List of sections for setting per-domain options for this domain.
namelist = namelist_outer,namelist_ghost

[ensda_gfsinputcheck]
# Check next cycle for TDR data
catalog = {input_catalog} ;; The section for configuring hwrf.input.DataCatalog
dataset=enkf ;; The dataset for hwrf.input to find GFS ENKF data
item=enkf_sfg ;; The item for hwrf.input to find GFS ENKF forecast files
anl_item = enkf_siganl    ;; Item for hwrf.input to find GFS ENKF analysis files

## Configures the prep_hybrid run for ENSDA
[ensda_prep_hybrid]
namelist = prep_hybrid_namelist ;; Name of the section used to make the prep_hybrid namelist
dataset = enkf            ;; Dataset for hwrf.input to find GFS ENKF files
item = enkf_sfg           ;; Item for hwrf.input to find GFS ENKF forecast files
anl_item = enkf_siganl    ;; Item for hwrf.input to find GFS ENKF analysis files
catalog = {input_catalog} ;; Catalog section for hwrf.input.DataCatalog
threads = 8               ;; Number of threads when running hwrf_prep

## Configures the hwrf.ensda.DAEnsemble
[hwrf_da_ens]
fcsttask=ensda_runwrf  ;; Section for configuring the 6hr forecast task
realinit=enswrfexe     ;; Section for configuring the real_nmm program
prep_hybrid=ensda_prep_hybrid ;; Section for configuring prep_hybrid
enkf_merge=enkfmerge   ;; Section for configuring the enkf merge task
ensda_size=40          ;; number of ensemble members (from 30 to 80)
ensda_physics_pert=0   ;; 0: no physics pert. 1: physics pert. in ensda
post=ensda_post        ;; section to configure the post for the ensemble
regribber=regribber    ;; section to configure the regribber for finding the vortex of the ensemble
tracker=tracker        ;; section to configure the tracker for finding the vortex of the ensemble
relocate=ensda_relocate ;; section to configure the relocation for the ensemble
gsihx=gsi_enshx        ;; section to configure the gsi for the ensemble innovation 

## Configures the WRF execution for the 6hr forecast
[ensda_runwrf]
simlen=21600 ;; Simulation length in seconds
nproc_x=-1   ;; Number of processors in the X direction (-1 means auto)
nproc_y=-1   ;; Number of processors in the Y direction (-1 means auto)
nio_groups=1 ;; Number of IO server groups
nio_tasks_per_group=0 ;; Number of ranks per IO server group (0=disable)
use_restart=yes ;; if yes, output restart file, if no, output wrfout
io_netcdf=yes ;; if yes, io_form=2 NetCDF, otherwise io_form=11 PNetCDF
@inc=runwrf,wrfexe ;; Include settings from these sections

[ensda_post]
## Post control file
control={PARMhwrf}/post/hwrf_cntrl.nonsat

[ensda_relocate_pre]
ens_rlct_flag_file = {com}/{stormlabel}.run_ensda_relocate ;; The path to ENSDA flag file
intensitymin = 14.0 ;; Run relocation when intensity >= intensitymin

[ensda_relocate]
# scrub=no
initopt=1 ;; 1: relocation only (this is the only option for the ensemble)

# ----------------------------------------------------------------------
# Delivery settings
# ----------------------------------------------------------------------