## This is a UNIX conf file that contains all information relating to
# the HAFS configuration.
# The syntax:
#
#      [section]
#      var = value
#
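# Values may also reference other settings. As illustration, the lines below are
# copied from entries later in this file and show the interpolation forms used
# throughout: references to other variables, storm vitals via {vit[...]}, values
# from other sections via {section/var}, and environment variables via
# {ENV[NAME|-default]} with an optional fallback default:
#
#      WORKhafs={CDSCRUB}/{RUNhafs}/{vit[YMDH]}/{vit[stormid3]}
#      LEVS_ENS={grid/LEVS}
#      DCOMROOT={ENV[DCOMROOT|-/dcom]}
#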
## Sets basic configuration options used by all components.
[config]
## The main configuration file.
CONFhafs={com}/{stormlabel}.conf
## The holdvars file.
HOLDVARS={com}/{stormlabel}.holdvars.txt
# RUNhafs is used as a component of other file paths
RUNhafs={SUBEXPT}

# Prefix to prepend to most output files in the COM directory.
out_prefix={vit[stnum]:02d}{vit[basin1lc]}.{vit[YMDH]}
out_prefix_nodate={vit[stnum]:02d}{vit[basin1lc]}
old_out_prefix={oldvit[stnum]:02d}{oldvit[basin1lc]}.{oldvit[YMDH]}
old_out_prefix_nodate={oldvit[stnum]:02d}{oldvit[basin1lc]}
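# As a hypothetical example, for storm 09L at cycle 2023082918 the expressions above
# expand to out_prefix=09l.2023082918 and out_prefix_nodate=09l; old_out_prefix uses
# the previous cycle's vitals in the same way.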
out_gridnames=parent,storm

ENS=99                ;; The ensemble number (placeholder)

# Specifies a section to use for input data catalog: fcst_{GFSVER}, wcoss_fcst_nco
input_catalog=fcst_{GFSVER}
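# For example, to read input from the NCO WCOSS catalog section named above, one
# could instead set:
#input_catalog=wcoss_fcst_nco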

docn_source=NONE ;; Data source for data ocean model (GHRSST, OISST, or NONE)
datm_source=NONE ;; Data source for data atmosphere model (ERA5 or NONE)

## Configure file and directory paths
[dir]
HOMEhafs={CDSAVE}/{EXPT}
WORKhafs={CDSCRUB}/{RUNhafs}/{vit[YMDH]}/{vit[stormid3]}
intercom={WORKhafs}/intercom
COMhafs={CDSCRUB}/{RUNhafs}/com/{vit[YMDH]}/{vit[stormid3]}
com={CDSCRUB}/{RUNhafs}/com/{vit[YMDH]}/{vit[stormid3]}
oldcom={CDSCRUB}/{RUNhafs}/com/{oldvit[YMDH]}/{oldvit[stormid3]}
COMOLD={oldcom}
COMIN={COMhafs}
COMOUT={COMhafs}
DCOMROOT={ENV[DCOMROOT|-/dcom]}
DCOM={DCOMROOT}
COMINnhc={DCOMROOT}/nhc/atcf/ncep
COMINjtwc={DCOMROOT}/{ENV[PDY]}/wtxtbul/storm_data
COMgfs={ENV[COMINgfs]}
COMINobs={COMgfs}
COMINgfs={COMgfs}
COMINgdas={COMgfs}
COMINarch={COMgfs}/syndat
COMINrtofs={COMrtofs}
COMINmsg={COMhafs}
COMINhafs={COMgfs}
ADECKhafs={ENV[ADECKhafs]}                  ;; A-Deck directory for graphics
BDECKhafs={ENV[BDECKhafs]}                  ;; B-Deck directory for graphics
cartopyDataDir={ENV[cartopyDataDir]}        ;; cartopy data directory for graphics
statusfile={WORKhafs}/{stormlabel}.{YMDH}   ;; cycle status file
## Domain center location file in COM.
domlocfile={com}/{vit[stnum]:02d}{vit[basin1lc]}.{vit[YMDH]}.{RUN}.domain.center
## File to check in a prior cycle's com, to see if the cycle exists.
HISTCHECK={oldcom}/{oldvit[stnum]:02d}{oldvit[basin1lc]}.{oldvit[YMDH]}.{RUN}.domain.center

PARMforecast={PARMhafs}/forecast/regional   ;; The location where the forecast job will find its parm and namelist files
PARMgsi={PARMhafs}/hafs-gsi/                ;; GSI input data for everything except CRTM
PARMhycom={PARMhafs}/hycom/regional         ;; hycom parameter files
PARMww3={PARMhafs}/ww3/regional             ;; ww3 parameter files
FIXhycom={FIXhafs}/fix_hycom                ;; HYCOM fix files
FIXww3={FIXhafs}/fix_ww3                    ;; WW3 fix files
FIXcrtm={ENV[CRTM_FIX]}                     ;; CRTM fix files
FIXcdeps={FIXhafs}/fix_cdeps                ;; CDEPS fix files
FIXmeshes={FIXcdeps}/meshes                 ;; premade CDEPS meshes

# Data model locations
DOCNdir={ENV[DOCNdir]}
DATMdir={ENV[DATMdir]}
# Processed input files and meshes during workflow execution:
docn_input_path={intercom}/cdeps
datm_input_path={intercom}/cdeps

## Executable program locations
# Currently not used in the workflow script system
[exe]
tar=tar    ;; GNU Tar
htar=htar  ;; HTAR tape archiving program
hsi=hsi    ;; hsi tape manipulation program
wgrib2={ENV[WGRIB2]}
cnvgrib={ENV[CNVGRIB]}
wgrib={ENV[WGRIB]}
grbindex={ENV[GRIBINDEX]}
copygb2={ENV[COPYGB2]}
grb2index={ENV[GRB2INDEX]}
mpiserial={EXEChafs}/hafs_tools_mpiserial.x ;; Executes serial programs via MPI
MPISERIAL={EXEChafs}/hafs_tools_mpiserial.x ;; Executes serial programs via MPI

# ww3
ww3_grid = {EXEChafs}/hafs_ww3_grid.x
ww3_strt = {EXEChafs}/hafs_ww3_strt.x
ww3_prep = {EXEChafs}/hafs_ww3_prep.x
ww3_outf = {EXEChafs}/hafs_ww3_outf.x
ww3_outp = {EXEChafs}/hafs_ww3_outp.x
ww3_grib = {EXEChafs}/hafs_ww3_grib.x
ww3_gint = {EXEChafs}/hafs_ww3_gint.x
ww3_bound = {EXEChafs}/hafs_ww3_bound.x
ww3_prnc = {EXEChafs}/hafs_ww3_prnc.x
ww3_ounf = {EXEChafs}/hafs_ww3_ounf.x
ww3_ounp = {EXEChafs}/hafs_ww3_ounp.x

# hycom
hafs_get_rtofs={EXEChafs}/hafs_hycom_utils_get_rtofs.x
hafs_rtofs_subregion={EXEChafs}/hafs_hycom_utils_rtofs_subregion.x
hafs_gfs2ofs2={EXEChafs}/hafs_hycom_utils_gfs2ofs2.x
hafs_timeinterp_forcing={EXEChafs}/hafs_hycom_utils_timeinterp_forcing.x
hafs_archv2restart={EXEChafs}/hafs_hycom_utils_archv2restart.x
hafs_restart2restart={EXEChafs}/hafs_hycom_utils_restart2restart.x
hafs_archv2data3z={EXEChafs}/hafs_hycom_utils_archv2data3z.x
hafs_archv2data2d={EXEChafs}/hafs_hycom_utils_archv2data2d.x
hafs_archv3z2nc={EXEChafs}/hafs_hycom_utils_archv3z2nc.x
hafs_gfs2ofsinputs.py={USHhafs}/hafs_gfs2ofsinputs.py

[launch]

# Sanity check options for the launch job
[sanity]

[grid]
CASE=C512            ;; FV3 resolution
LEVS=82              ;; Number of model vertical levels
gtype=regional       ;; grid type: uniform, stretch, nest, or regional
# If gridfixdir is provided and the dir exists, then the hafs_grid job will
# just copy over the pre-generated static grid fix files under the gridfixdir.
#gridfixdir={FIXhafs}/fix_fv3/{CASE}
gridfixdir=/let/hafs_grid/generate/grid
# Otherwise, hafs_grid will generate the model grid according to the following grid parameters
# Needed for grid types: stretch, nest, and regional
stretch_fac=1.0001    ;; Stretching factor for the grid
# Use domlon and domlat if they are specified in the [config] section; otherwise
# domlon and domlat will be generated automatically from the storm information
# (see the commented example below).
target_lon={domlon}  ;; center longitude of the highest resolution tile
target_lat={domlat}  ;; center latitude of the highest resolution tile
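# Example (illustrative values only) of fixing the domain center from the [config]
# section instead of deriving it from the storm vitals:
#      [config]
#      domlon=-75.0
#      domlat=25.0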
# Needed for grid types: nest and regional
nest_grids=2
parent_grid_num=1,2
parent_tile=6,7
# The following options set a 1440x1320 outer regional grid with a refinement ratio of 3, sitting at the center of the tile
refine_ratio=3,3     ;; refinement ratio for each nest grid
istart_nest=33,-999  ;; start index of the regional/nested domain on the tile's super grid
jstart_nest=73,-999
iend_nest=992,-999   ;; end index of the regional/nested domain on the tile's super grid
jend_nest=952,-999
halo=3               ;; halo size to be used in the atmosphere cubed-sphere model for the grid tile.
halop1=4             ;; halo size that will be used for the orography and grid tile in chgres
halo0=0              ;; no halo, used to shave the filtered orography for use in the model
# Regional ESG grid related options
regional_esg=yes     ;; Use regional ESG grid, yes, or no
idim_nest=1440,660
jdim_nest=1320,660
delx_nest=0.027,0.009
dely_nest=0.027,0.009
halop2=5
pazi=-180.
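# Consistency note for the values above: the outer ESG nest dimensions follow from
# the supergrid indices and refinement ratio,
#      idim_nest = (iend_nest - istart_nest + 1)/2 * refine_ratio = (992 - 33 + 1)/2 * 3 = 1440
#      jdim_nest = (jend_nest - jstart_nest + 1)/2 * refine_ratio = (952 - 73 + 1)/2 * 3 = 1320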

[grid_mvnest1res]
CASE_mvnest1res=C512
LEVS_mvnest1res={grid/LEVS}
gtype_mvnest1res={grid/gtype}
gridfixdir_mvnest1res=/let/hafs_grid/generate/grid_mvnest1res
stretch_fac_mvnest1res={grid/stretch_fac}
target_lon_mvnest1res={grid/target_lon}
target_lat_mvnest1res={grid/target_lat}
nest_grids_mvnest1res=1
parent_grid_num_mvnest1res=1
parent_tile_mvnest1res=6
refine_ratio_mvnest1res=9
istart_nest_mvnest1res=33
jstart_nest_mvnest1res=73
iend_nest_mvnest1res=992
jend_nest_mvnest1res=952
regional_esg_mvnest1res={grid/regional_esg}
idim_nest_mvnest1res=4320
jdim_nest_mvnest1res=3960
delx_nest_mvnest1res=0.009
dely_nest_mvnest1res=0.009
halop2_mvnest1res=15

[grid_ens]
CASE_ENS={grid/CASE}
LEVS_ENS={grid/LEVS}
gtype_ens={grid/gtype}
gridfixdir_ens={grid/gridfixdir}
stretch_fac_ens={grid/stretch_fac}
target_lon_ens={grid/target_lon}
target_lat_ens={grid/target_lat}
nest_grids_ens={grid/nest_grids}
refine_ratio_ens={grid/refine_ratio}
istart_nest_ens={grid/istart_nest}
jstart_nest_ens={grid/jstart_nest}
iend_nest_ens={grid/iend_nest}
jend_nest_ens={grid/jend_nest}
regional_esg_ens={grid/regional_esg}
idim_nest_ens={grid/idim_nest}
jdim_nest_ens={grid/jdim_nest}
delx_nest_ens={grid/delx_nest}
dely_nest_ens={grid/dely_nest}
halop2_ens={grid/halop2}

[input]

[atm_ic]

[atm_lbc]

[atm_init]
# ccpp suites
ccpp_suite_regional_init=FV3_HAFS_v1_thompson
ccpp_suite_glob_init=FV3_HAFS_v1_thompson
ccpp_suite_nest_init=FV3_HAFS_v1_thompson

# nsst related namelist options
# Choose nstf_name=2,0,0,0,0 when ictype is nemsio
# Choose nstf_name=2,1,0,0,0 when ictype is grib2
nstf_n1_init=2
nstf_n2_init=0
nstf_n3_init=0
nstf_n4_init=0
nstf_n5_init=0
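# Taken together, the values above correspond to nstf_name = 2,0,0,0,0, i.e. the
# nemsio-type choice described in the comment above.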

glob_layoutx_init=8
glob_layouty_init=8
# For the nested or regional standalone domain
layoutx_init=12,12
layouty_init=20,20

write_groups_init=1
write_tasks_per_group_init=60

[atm_merge]
atm_merge_method=vortexreplace ;; vortexreplace or domainmerge

[ww3init]
scrub = no                           ;; scrub temporary files?
catalog = {input_catalog}            ;; where to get input data
# Subsection for ww3 pre task
usegfswind = yes                     ;; Time-updated GFS forcing outside HAFS domain?
input_step = 10800                   ;; Timestep between forcing updates (in seconds)
gfs_dataset = gfs                    ;; Dataset for GFS forcing
gfs_item = gfs_gribA                 ;; Data item for GFS forcing
gfswave_dataset = gfswave            ;; Dataset for wave boundary condition from global wave model
gdaswave_dataset = gdaswave          ;; Dataset for wave boundary condition from global wave model
ww3bdy_item = ww3bdy_ibp             ;; Data item for wave boundary condition from global wave model
ww3rst_item = ww3rst_gnh_10m         ;; Data item for wave initial condition from global wave model
ww3_bdy = yes                        ;; Use wave boundary condition from NCEP global wave model
ww3_rst = yes                        ;; Option controlling how to use initial wave condition from NCEP global model
#                                       yes: use initial wave condition from NCEP global wave model for cold start forecast cycles
#                                       always: use initial wave condition from NCEP global wave model for all forecast cycles
grid_gnh_10m_inp = {PARMww3}/ww3_grid_gnh_10m.inp
grid_inp = {PARMww3}/ww3_grid_{vit[basin1lc]}.inp
grid_mesh = {FIXww3}/ww3_mesh_{vit[basin1lc]}.nc
grid_bot = {FIXww3}/ww3_grid_{vit[basin1lc]}.bot
grid_msk = {FIXww3}/ww3_grid_{vit[basin1lc]}.msk
grid_msk2 = {FIXww3}/ww3_grid_{vit[basin1lc]}.msk2
grid_obr = {FIXww3}/ww3_grid_{vit[basin1lc]}.obr
gint_inp = {PARMww3}/ww3_gint.inp_tmpl
wind_inp = {PARMww3}/ww3_prep_WNDDummy.inp
prnc_inp_gfswind = {PARMww3}/ww3_prnc_gfswind.inp
curr_inp = {PARMww3}/ww3_prep_CURDummy.inp
strt_inp = {PARMww3}/ww3_strt.inp
bound_inp = {PARMww3}/ww3_bound_{vit[basin1lc]}.inp
shel_inp = {PARMww3}/ww3_shel.inp_tmpl
ounf_inp = {PARMww3}/ww3_ounf.inp_tmpl
ounp_spec_inp = {PARMww3}/ww3_ounp_spec.inp_tmpl
outp_info_inp = {PARMww3}/ww3_outp_info.inp_tmpl
outp_bull_inp = {PARMww3}/ww3_outp_bull.inp_tmpl
outp_spec_inp = {PARMww3}/ww3_outp_spec.inp_tmpl
grib_inp = {PARMww3}/ww3_grib.inp_tmpl
buoy_inp = {PARMww3}/ww3_buoy.inp

[hycominit1]
scrub=no
catalog={input_catalog}
hycom_domain=large    ;; small or large hycom domain
RTOFS_TAR={rtofs}/rtofs.{aYMD}   ;; RTOFS .a.tgz and .b file locations
RTOFS_STAGE={WORKhafs}/hycominit1/RTOFSDIR ;; RTOFS staging/linking area
bools=hycombools      ;; Section with YES/NO variables for shell programs
strings=hycomstrings  ;; Section with string variables for shell programs
## Output spin-up restart files; should contain RUNmodIDout and ab vars
restart_outR={com}/{out_prefix}.{RUNmodIDout}.spin_restart.{ab}
## Output restart files; should contain RUNmodIDout and ab vars
restart_out={com}/{out_prefix}.{RUNmodIDout}.restart.{ab}
## Output spin files; should contain RUNmodIDout and ab vars
spin_archv={com}/{out_prefix}.spin_archv.{ab}
## Enable river adjustment in HYCOM initialization?
adjust_river=0
## Enable temperature adjustment in HYCOM initialization?
adjust_temp=0
## Interval in hours between forcing in the 126hr forecast mode
forecast_forcing_interval=3
atmos1_dataset=gdas1      ;; Dataset for global atmospheric surface data before time 0
atmos1_flux=gdas1_sfluxgrb ;; Item for atmospheric flux data before time 0
atmos1_grid=gdas1_gribA   ;; Item for atmospheric air data before time 0
atmos2_dataset=gfs        ;; Dataset for global atmospheric surface data after time 0
atmos2_flux=gfs_sfluxgrb  ;; Item for atmospheric flux data after time 0
atmos2_grid=gfs_gribA     ;; Item for atmospheric air data after time 0
ocean_dataset=rtofs       ;; Dataset for global ocean archv data
ocean_fcst=rtofs_fcst     ;; Item for ocean data after analysis time
ocean_past=rtofs_past     ;; Item for ocean data before the analysis time
ocean_now=rtofs_now       ;; Item for ocean data at the analysis time
ocean_rst=rtofs_rst       ;; Item for ocean restart files
ocean_dataset_stage=rtofsstage       ;; Dataset for global ocean archv data (staged)
ocean_fcst_name=rtofs_fcst_name     ;; Item for ocean data after analysis time
ocean_past_name=rtofs_past_name     ;; Item for ocean data before the analysis time
ocean_now_name=rtofs_now_name       ;; Item for ocean data at the analysis time
ocean_rst_name=rtofs_rst_name       ;; Item for ocean restart files

[hycominit2]
scrub=no
catalog={input_catalog}
hycom_domain={hycominit1/hycom_domain}
bools=hycombools      ;; Section with YES/NO variables for shell programs
strings=hycomstrings  ;; Section with string variables for shell programs
## Interval in hours between forcing in the 126hr forecast mode
forecast_forcing_interval=3
atmos1_dataset=gdas1      ;; Dataset for global atmospheric surface data before time 0
atmos1_flux=gdas1_sfluxgrb ;; Item for atmospheric flux data before time 0
atmos1_grid=gdas1_gribA   ;; Item for atmospheric air data before time 0
atmos2_dataset=gfs        ;; Dataset for global atmospheric surface data after time 0
atmos2_flux=gfs_sfluxgrb  ;; Item for atmospheric flux data after time 0
atmos2_grid=gfs_gribA     ;; Item for atmospheric air data after time 0

[vi]
vi_min_wind_for_init=9           ;; m/s, skip vortex initialization if vmax <= vi_min_wind_for_init for weak storms
vi_warm_start_vmax_threshold=20  ;; m/s
vi_bogus_vmax_threshold=50       ;; m/s
vi_storm_env=init                ;; init: from gfs/gdas init; pert: from the same source for the storm perturbation
vi_storm_relocation=yes
vi_storm_modification=yes       ;; yes: always VM; no: no VM; auto: do VM based on vmax diff; vmax_threshold: do VM based on vmax threshold
vi_composite_vortex=2            ;; 1, hwrf-based composite vortex; 2, hfsa-based composite vortex
vi_cloud=2                       ;; 0: no cloud & vertical velocity modification (same as HAFSv1); 1: GFDL & partial Thompson microphysics; 2: Thompson microphysics
vi_adjust_intensity=yes          ;; placeholder
vi_adjust_size=yes               ;; placeholder

[gsi]
use_bufr_nr=no            ;; use non-restricted version of observational bufr files
grid_ratio_fv3_regional=1 ;; ratio of analysis grid to fv3 model grid in fv3 grid units
s_ens_h=300,50,60         ;; homogeneous isotropic horizontal ensemble localization scale (km)
s_ens_v=-0.5,-0.5,0.      ;; vertical localization scale
online_satbias=no         ;; Should we recycle satbias data?
l_both_fv3sar_gfs_ens=.false. ;; Whether or not to use both GDAS and regional ensembles in the GSI EnVar analysis
l4densvar=.true.          ;; whether or not to do 4DEnVar
nsclgrp=2                 ;; SDL scale groups, 1 for no SDL
naensloc=3                ;; Total number of spatial localization lengths
nhr_obsbin=3              ;; length of observation bins in hours, e.g., 3: ensembles at 3,6,9-h; 1: ensembles at 3,4,5,6,7,8,9-h

[enkf]
corrlength=500            ;; length for horizontal localization in km
lnsigcutoff=1.3           ;; length for vertical localization

[analysis_merge]
analysis_merge_method=vortexreplace ;; vortexreplace or domainmerge

[forecast]
# ccpp suites
ccpp_suite_regional=FV3_HAFS_v1_thompson_nonsst
ccpp_suite_glob=FV3_HAFS_v1_thompson_nonsst
ccpp_suite_nest=FV3_HAFS_v1_thompson_nonsst

# nsst related namelist options
# Choose nstf_name=2,0,0,0,0 when ictype is nemsio
# Choose nstf_name=2,1,0,0,0 when ictype is grib2
# Choose nstf_name=0,0,0,0,0 when ocean coupling is on
nstf_n1=0
nstf_n2=0
nstf_n3=0
nstf_n4=0
nstf_n5=0
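# Taken together, the values above give nstf_name = 0,0,0,0,0, the ocean-coupling
# choice described in the comment above (consistent with the CMEPS ATM-OCN coupling
# configured below).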

# Coupling related options. Only useful when run_ocean=yes
# When run_ocean/run_wave=yes, then the following options are available for cpl_atm_ocn/cpl_atm_wav/cpl_wav_ocn
# nuopc_sidebyside  Two model components running side by side (no coupling) through NUOPC
# nuopc_nearest     NUOPC based direct two-way coupling through nearest-point regridding
# nuopc_bilinear    NUOPC based direct two-way coupling through bilinear regridding
# cmeps_2way        CMEPS based two-way coupling
# cmeps_1way_1to2   CMEPS based one-way coupling from component 1 to 2 only
# cmeps_1way_2to1   CMEPS based one-way coupling from component 2 to 1 only
# cmeps_sidebyside  Two model components running side by side (no coupling) through CMEPS
cpl_atm_ocn=cmeps_2way
cpl_atm_wav=cmeps_1way_1to2
cpl_wav_ocn=cmeps_sidebyside
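# For example (illustrative only), direct two-way ATM-OCN coupling through NUOPC
# nearest-point regridding, without CMEPS, could instead be selected with:
#cpl_atm_ocn=nuopc_nearest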
cpl_dt=360                     ;; coupling time step in seconds
all_tasks=1080                 ;; Number of PEs for the coupled forecast: ATM(compute+io) + [OCN|MED] + [WAV] (only needed for rocoto workflow)
atm_tasks=1020                 ;; Number of PEs for the ATM component (not used yet)
ocn_tasks=60                   ;; Number of PEs for the OCN component
wav_tasks=60                   ;; Number of PEs for the WAV component
med_tasks={forecast/ocn_tasks} ;; Number of PEs for the MED component
ocean_start_dtg=auto           ;; epoch day since hycom_epoch=datetime.datetime(1900,12,31,0,0,0), e.g., 43340.00000
merge_import=.true.
icplocn2atm=1                  ;; 0: only consider SST in ATM-OCN coupling; 1: also consider ocean surface currents (SSC)

# Some options for FV3 model_configure
dt_atmos=90          ;; FV3 time step in seconds
# For the global domain if it exists in the model configuration
glob_k_split=1
glob_n_split=7
glob_layoutx=8
glob_layouty=8
glob_npx=769
glob_npy=769
glob_io_layoutx=1
glob_io_layouty={forecast/glob_layouty}
glob_hord_mt=5
glob_hord_vt=5
glob_hord_tm=5
glob_hord_dp=5
glob_hord_tr=-5
glob_lim_fac=1.0
glob_full_zs_filter=.true.
glob_n_zs_filter=0
glob_n_del2_weak=15
glob_max_slope=0.15
glob_rlmx=300.
glob_elmx=300.
glob_sfc_rlm=1
glob_tc_pbl=0
glob_shal_cnv=.true.
glob_do_deep=.true.
# For the nested or regional standalone domain
npx=1441,661
npy=1321,661
npz=81
k_split=2,4
n_split=5,9
layoutx=30,20
layouty=20,18
io_layoutx=1,1
io_layouty=1,1
hord_mt=1,1
hord_vt=1,1
hord_tm=1,1
hord_dp=1,1
hord_tr=-5,-5
lim_fac=3.1,3.1
full_zs_filter=.true.,.true.
n_zs_filter=1,1
n_del2_weak=15,24
max_slope=0.12,0.12
rlmx=300.,250.
elmx=300.,250.
sfc_rlm=1,1
tc_pbl=0,0
shal_cnv=.true.,.true.
do_deep=.true.,.true.
# Radiation scheme calling time steps (in seconds)
fhswr=900.
fhlwr=900.
# Aerosol option
iaer=1011
lseaspray=.false.
# Deep convection entrainment rate parameter
clam_deep=0.15
c1_deep=0.002
c1_shal=0.0005
progsigma=.false.
betascu=4.0
betamcu=1.0
betadcu=1.0

# GFDL MP related options
#imp_physics=11
#iovr=1
#dt_inner=45
#dnats=1
#do_sat_adj=.true.
#lgfdlmprad=.true.

# Thompson MP related options
imp_physics=8
iovr=3
dt_inner=45
dnats=0
do_sat_adj=.false.
lgfdlmprad=.false.

# Noah LSM related options
lsm=1
ialb=1
iems=1

# Noah-MP LSM related options
#lsm=2
#ialb=2
#iems=2

# Stochastic physics related options
do_sppt=.false.
do_shum=.false.
do_skeb=.false.

# Moving nest related options
is_moving_nest=.false.,.true.
vortex_tracker=0,7
ntrack=0,4
move_cd_x=0,0
move_cd_y=0,0

restart_interval="3 6 9"

# The write_grid_component related options
quilting=.true.
write_groups=2
write_tasks_per_group=60
write_dopost=.false.
output_history=.true.

# The option for output grid type: rotated_latlon, regional_latlon, regional_latlon_moving
# Currently, the cubed_sphere_grid option is only supported by the forecast job; the post and product jobs cannot handle cubed_sphere_grid yet.
#output_grid='cubed_sphere_grid'
output_grid=regional_latlon,regional_latlon_moving
output_grid_cen_lon={domlon},{domlon}  ;; central longitude
output_grid_cen_lat={domlat},{domlat}  ;; central latitude
output_grid_lon_span=100.8,20.0        ;; output domain span for longitude in regular latlon coordinate system (in degrees)
output_grid_lat_span=81.6,16.0         ;; output domain span for latitude in regular latlon coordinate system (in degrees)
output_grid_dlon=0.06,0.02             ;; output grid spacing dlon (in degrees)
output_grid_dlat=0.06,0.02             ;; output grid spacing dlat (in degrees)
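# For reference, the spans and spacings above imply an output grid of roughly
# 100.8/0.06 = 1680 by 81.6/0.06 = 1360 points for the parent domain and
# 20.0/0.02 = 1000 by 16.0/0.02 = 800 points for the moving nest (exact counts
# depend on the endpoint convention).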

#output_grid=rotated_latlon
#output_grid_cen_lon={domlon}      ;; central longitude
#output_grid_cen_lat={domlat}      ;; central latitude
#output_grid_lon_span=78.0         ;; output domain span for longitude in rotated coordinate system (in degrees)
#output_grid_lat_span=69.0         ;; output domain span for latitude in rotated coordinate system (in degrees)
#output_grid_dlon=0.03             ;; output grid spacing dlon (in degrees)
#output_grid_dlat=0.03             ;; output grid spacing dlat (in degrees)

#output_grid=regional_latlon
#output_grid_cen_lon={domlon}      ;; central longitude
#output_grid_cen_lat={domlat}      ;; central latitude
#output_grid_lon_span=96.0         ;; output domain span for longitude in regular latlon coordinate system (in degrees)
#output_grid_lat_span=69.0         ;; output domain span for latitude in regular latlon coordinate system (in degrees)
#output_grid_dlon=0.03             ;; output grid spacing dlon (in degrees)
#output_grid_dlat=0.03             ;; output grid spacing dlat (in degrees)

# Data model defaults
mesh_ocn_in=missing ;; premade mesh to use if make_mesh_ocn=no
mesh_ocn_gen={WORKhafs}/intercom/cdeps/DOCN_ESMF_mesh.nc ;; do not change
mesh_atm_in=missing ;; premade mesh to use if make_mesh_atm=no
mesh_atm_gen={WORKhafs}/intercom/cdeps/DATM_ESMF_mesh.nc ;; do not change
docn_mesh_nx_global=1440 ;; Dimensions of data ocean model in X direction
docn_mesh_ny_global=720 ;; Dimensions of data ocean model in Y direction

[forecast_ens]
# ccpp suites
ccpp_suite_regional_ens={forecast/ccpp_suite_regional}
ccpp_suite_glob_ens={forecast/ccpp_suite_glob}
ccpp_suite_nest_ens={forecast/ccpp_suite_nest}

# Some options for FV3 model_configure
dt_atmos_ens={forecast/dt_atmos}
# For the global domain if it exists in the model configuration
glob_k_split_ens={forecast/glob_k_split}
glob_n_split_ens={forecast/glob_n_split}
glob_layoutx_ens={forecast/glob_layoutx}
glob_layouty_ens={forecast/glob_layouty}
glob_npx_ens={forecast/glob_npx}
glob_npy_ens={forecast/glob_npy}
glob_io_layoutx_ens={forecast/glob_io_layoutx}
glob_io_layouty_ens={forecast/glob_io_layouty}
glob_hord_mt_ens={forecast/glob_hord_mt}
glob_hord_vt_ens={forecast/glob_hord_vt}
glob_hord_tm_ens={forecast/glob_hord_tm}
glob_hord_dp_ens={forecast/glob_hord_dp}
glob_hord_tr_ens={forecast/glob_hord_tr}
glob_lim_fac_ens={forecast/glob_lim_fac}
glob_full_zs_filter_ens={forecast/glob_full_zs_filter}
glob_n_zs_filter_ens={forecast/glob_n_zs_filter}
glob_n_del2_weak_ens={forecast/glob_n_del2_weak}
glob_max_slope_ens={forecast/glob_max_slope}
glob_rlmx_ens={forecast/glob_rlmx}
glob_elmx_ens={forecast/glob_elmx}
glob_sfc_rlm_ens={forecast/glob_sfc_rlm}
glob_tc_pbl_ens={forecast/glob_tc_pbl}
glob_shal_cnv_ens={forecast/glob_shal_cnv}
glob_do_deep_ens={forecast/glob_do_deep}

# For the nested or regional standalone domain
npx_ens={forecast/npx}
npy_ens={forecast/npy}
npz_ens={forecast/npz}
k_split_ens={forecast/k_split}
n_split_ens={forecast/n_split}
layoutx_ens={forecast/layoutx}
layouty_ens={forecast/layouty}
io_layoutx_ens={forecast/io_layoutx}
io_layouty_ens={forecast/io_layouty}
hord_mt_ens={forecast/hord_mt}
hord_vt_ens={forecast/hord_vt}
hord_tm_ens={forecast/hord_tm}
hord_dp_ens={forecast/hord_dp}
hord_tr_ens={forecast/hord_tr}
lim_fac_ens={forecast/lim_fac}
full_zs_filter_ens={forecast/full_zs_filter}
n_zs_filter_ens={forecast/n_zs_filter}
n_del2_weak_ens={forecast/n_del2_weak}
max_slope_ens={forecast/max_slope}
rlmx_ens={forecast/rlmx}
elmx_ens={forecast/elmx}
sfc_rlm_ens={forecast/sfc_rlm}
tc_pbl_ens={forecast/tc_pbl}
shal_cnv_ens=.true.
do_deep_ens=.true.

# Stochastic physics related options
do_sppt_ens=.false.
do_shum_ens=.false.
do_skeb_ens=.false.

restart_interval_ens={forecast/restart_interval}

# The write_grid_component related options
quilting_ens={forecast/quilting}
write_groups_ens={forecast/write_groups}
write_tasks_per_group_ens={forecast/write_tasks_per_group}
write_dopost_ens={forecast/write_dopost}
output_history_ens={forecast/output_history}

# Placeholders currently, not implemented yet
output_grid_ens={forecast/output_grid}
output_grid_cen_lon_ens={forecast/output_grid_cen_lon}
output_grid_cen_lat_ens={forecast/output_grid_cen_lat}
output_grid_lon_span_ens={forecast/output_grid_lon_span}
output_grid_lat_span_ens={forecast/output_grid_lat_span}

output_grid_dlon_ens={forecast/output_grid_dlon}
output_grid_dlat_ens={forecast/output_grid_dlat}

[atm_post]
# Grid definition for atm_post and tracker, used by wgrib2
# Example:
#   post_gridspecs="latlon 246.6:4112:0.025 -2.4:1976:0.025"
# latlon lon0:nlon:dlon lat0:nlat:dlat
# lat0, lon0 = degrees of lat/lon for 1st grid point
# nlon = number of longitudes
# nlat = number of latitudes
# dlon = grid cell size in degrees of longitude
# dlat = grid cell size in degrees of latitude
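# In the example above, the grid starts at lon0=246.6, lat0=-2.4 and spans roughly
# 4112 x 0.025 = 102.8 degrees of longitude by 1976 x 0.025 = 49.4 degrees of latitude.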
#
# If post_gridspecs=auto (the default), post_gridspecs will be generated automatically based on the output grid
post_gridspecs=auto,auto
trak_gridspecs={post_gridspecs}
satpost=.true. ;; do satellite post or not
nhcpost=.true. ;; subsetting grib2 files for NHC

[atm_post_ens]
post_gridspecs_ens=auto,auto
trak_gridspecs_ens={post_gridspecs_ens}
satpost_ens={atm_post/satpost}
nhcpost_ens=.false.

[product]
trkd12_combined=no

## Configure options for WW3 post-processing
[ww3post]
ww3_grib_post=yes        ;; Produce WW3 grid output in grib2 format
ww3_ounf_post=yes        ;; Produce WW3 grid output in netcdf format
ww3_ounp_spec_post=yes   ;; Produce WW3 point spectral output in netcdf format
ww3_outp_bull_post=yes   ;; Produce WW3 point output in bulletin format
ww3_outp_spec_post=yes   ;; Produce WW3 point output in spectral format

[hycompost]
scrub=no
bools=hycombools     ;; Section with YES/NO variables for shell programs
strings=hycomstrings ;; Section with string variables for shell programs

[archive]
mkdir=yes     ;; make the archive directory? yes or no

## Variables to set as string values when parsing the hafs_workflow.xml.in.
# This section is only used by the rocoto-based workflow
[rocotostr]
CDSAVE={dir/CDSAVE}                 ;; save area for Rocoto to use
CDNOSCRUB={dir/CDNOSCRUB}           ;; non-scrubbed area for Rocoto to use
CDSCRUB={dir/CDSCRUB}               ;; scrubbed area for Rocoto to use
PARMhafs={dir/PARMhafs}             ;; parm/ directory location
USHhafs={dir/USHhafs}               ;; ush/ directory location
EXhafs={dir/EXhafs}                 ;; scripts/ directory location
JOBhafs={dir/JOBhafs}               ;; jobs/ directory location
RUN={config/RUN}                    ;; run name
EXPT={config/EXPT}                  ;; experiment name
SUBEXPT={config/SUBEXPT}            ;; sub-experiment name
CPU_ACCOUNT={cpu_account}           ;; CPU account name
COMgfs={dir/COMgfs}                 ;; input GFS com directory
COMrtofs={dir/COMrtofs}             ;; input RTOFS com directory
gtype={grid/gtype}                  ;; grid type: uniform, stretch, nest, or regional
GFSVER={config/GFSVER}              ;; Version of GFS input data, e.g., PROD2021

# Specify the forecast job resources. Only a few combinations are provided. If
# needed, you may add other options in the site entity files under rocoto/sites.
#FORECAST_RESOURCES=FORECAST_RESOURCES_regional_{forecast/layoutx}x{forecast/layouty}io{forecast/write_groups}x{forecast/write_tasks_per_group}_omp2
FORECAST_RESOURCES=FORECAST_RESOURCES_{forecast/all_tasks}PE
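# With the defaults above ([forecast] all_tasks=1080) this resolves to
# FORECAST_RESOURCES_1080PE; the matching entity is assumed to be defined in the site
# entity files under rocoto/sites.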

# Variables to set as boolean values when parsing the hafs_workflow.xml.in.
# They'll be changed to YES or NO.  This section is only used by the rocoto-based workflow.
[rocotobool]
RUN_ATM_MVNEST={run_atm_mvnest}
RUN_ATM_MVNEST_ENS={run_atm_mvnest_ens}
RUN_ATM_INIT={run_atm_init}
RUN_ATM_INIT_FGAT={run_atm_init_fgat}
RUN_ATM_INIT_ENS={run_atm_init_ens}
RUN_ATM_MERGE={run_atm_merge}
RUN_ATM_MERGE_FGAT={run_atm_merge_fgat}
RUN_ATM_MERGE_ENS={run_atm_merge_ens}
RUN_ATM_VI={run_atm_vi}
RUN_ATM_VI_FGAT={run_atm_vi_fgat}
RUN_ATM_VI_ENS={run_atm_vi_ens}
RUN_GSI={run_gsi}                   ;; Do we run GSI?
GSI_D01={gsi_d01}                   ;; Do we run GSI for domain 01?
GSI_D02={gsi_d02}                   ;; Do we run GSI for domain 02?
GSI_D03={gsi_d03}                   ;; Do we run GSI for domain 03?
RUN_FGAT={run_fgat}                 ;; Do we run DA with FGAT?
RUN_ENVAR={run_envar}               ;; Do we run hybrid EnVar with either GDAS ensembles or regional ensembles?
RUN_ENSDA={run_ensda}               ;; Do we run the ensda system?
RUN_ENKF={run_enkf}                 ;; Do we run the self-cycled ensda system with EnKF analysis?
RUN_ANALYSIS_MERGE={run_analysis_merge}
RUN_ANALYSIS_MERGE_ENS={run_analysis_merge_ens}
RUN_OCEAN={run_ocean}               ;; Do we run with ocean coupling?
RUN_WAVE={run_wave}                 ;; Do we run with wave coupling?
RUN_DATM={run_datm}                 ;; Do we run with a data atmosphere using CDEPS?
RUN_DOCN={run_docn}                 ;; Do we run with a data ocean using CDEPS?
RUN_DWAV={run_dwav}                 ;; Do we run with data waves using CDEPS?
MAKE_MESH_ATM={make_mesh_atm}       ;; Should the DATM mesh be generated by the workflow?
MAKE_MESH_OCN={make_mesh_ocn}       ;; Should the DOCN mesh be generated by the workflow?
RUN_GEMPAK={run_gempak}             ;; Do we run GEMPAK?
RUN_HRDGRAPHICS={run_hrdgraphics}   ;; Do we run HRD graphics?
RUN_EMCGRAPHICS={run_emcgraphics}   ;; Do we run EMC graphics?
SCRUB_COM={scrub_com}               ;; Should Rocoto scrub the COM directory?
SCRUB_WORK={scrub_work}             ;; Should Rocoto scrub the WORK directory?

## This section contains YES/NO variables and values which will be set
## as environment variables in the shell programs.
#
# The hycombools section is filled with ENV_VAR=value entries.  Each
# ENV_VAR will be set to the boolean version of the value.  Values are
# converted to Python booleans and then to the strings YES or NO.  All
# environment variables must be valid shell and Python variable names.
[hycombools]
RUN_OCEAN={run_ocean}
RUN_WAVE={run_wave}
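# For example, when run_ocean=yes is set in the experiment configuration, the shell
# programs see RUN_OCEAN=YES in their environment.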

## This section contains variables and string values which will be set
## as environment variables in the shell programs.
#
# The hycomstrings section is filled with ENV_VAR=value entries.  The
# ENV_VAR will be set to the value, unmodified.  All environment
# variables must be valid shell and Python variable names.  The
# strings must be single line strings (no end-of-line characters) that
# are expressable in both shell and Python.
#
# @note The special RTOFSDIR variable is set independently based on
# the hafs.hycom.HYCOMInit.find_rtofs_data() function.
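# For example, the STORM={vit[stormname]} entry below exports the storm name from
# the tcvitals, unmodified, as the STORM environment variable.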
[hycomstrings]
NET={config/NET}
RUN={config/RUN}
out_prefix={config/out_prefix}
out_prefix_nodate={config/out_prefix_nodate}
YMDH={config/YMDH}
stormlabel={config/stormlabel}
STORM={vit[stormname]}
STID={vit[stormid3]}
basin2={vit[pubbasin2]}
USHhafs={dir/USHhafs}
FIXhafs={dir/FIXhafs}
PARMhafs={dir/PARMhafs}
EXEChafs={dir/EXEChafs}
COMhafs={config/com}
WORKhafs={dir/WORKhafs}