## Creating Practically Perfect probabilities from SPC LSRs

[config]

# Final conf file used by all processes
LOG_DIR = {OUTPUT_BASE}/logs
METPLUS_CONF = {TMP_DIR}/metplus_final_Point2Grid_obsLSR_ObsOnly_PracticallyPerfect.conf
LOG_METPLUS = {LOG_DIR}/run_metplus.Point2Grid_LSR.log.{LOG_TIMESTAMP_TEMPLATE}

SCRUB_STAGING_DIR = False

# LOOP_ORDER = processes

# List of applications to run
PROCESS_LIST = ASCII2NC, Point2Grid, RegridDataPlane

# Loop by VALID or INIT time
LOOP_BY = VALID

# Format of VALID_BEG and VALID_END
VALID_TIME_FMT = %Y%m%d%H

# Start and end time for METplus run
VALID_BEG = {ENV[ACCUM_END]}
VALID_END = {ENV[ACCUM_END]}

# Increment between METplus runs in seconds. Must be >= 60
VALID_INCREMENT = 3600

# Info on forecast leads and init to process
LEAD_SEQ = 0

DO_NOT_RUN_EXE = False

# Step 1: ASCII2NC - convert CSV to NetCDF

# Time relative to valid time (in seconds if no units are specified) to allow files to be
# considered valid. Set both BEGIN and END to 0 to require the exact time in the filename.
# Not used in this example.
ASCII2NC_FILE_WINDOW_BEGIN = 0
ASCII2NC_FILE_WINDOW_END = 0

# Time relative to each input file's valid time (in seconds if no units are specified) for
# data within the file to be considered valid.
ASCII2NC_WINDOW_BEGIN = 0
ASCII2NC_WINDOW_END = 0

# Value to pass with the -format argument to ascii2nc. See the MET User's Guide for more information.
ASCII2NC_INPUT_FORMAT = python

ASCII2NC_TIME_SUMMARY_FLAG = False
ASCII2NC_TIME_SUMMARY_RAW_DATA = False
ASCII2NC_TIME_SUMMARY_BEG = 000000
ASCII2NC_TIME_SUMMARY_END = 235959
ASCII2NC_TIME_SUMMARY_STEP = 300
ASCII2NC_TIME_SUMMARY_WIDTH = 600
ASCII2NC_TIME_SUMMARY_GRIB_CODES = 11, 204, 211
ASCII2NC_TIME_SUMMARY_VAR_NAMES =
ASCII2NC_TIME_SUMMARY_TYPES = min, max, range, mean, stdev, median, p80
ASCII2NC_TIME_SUMMARY_VALID_FREQ = 0
ASCII2NC_TIME_SUMMARY_VALID_THRESH = 0.0

ASCII2NC_INPUT_DIR = {ENV[DCOMINspc]}
ASCII2NC_INPUT_TEMPLATE = "{ENV[USHevs]}/cam/read_ascii_storm.py {ASCII2NC_INPUT_DIR}/{ENV[REP_DATE]}/validation_data/weather/spc/spc_reports_{ENV[REP_DATE]}.csv"

ASCII2NC_OUTPUT_DIR = {OUTPUT_BASE}/ascii2nc
ASCII2NC_OUTPUT_TEMPLATE = {ASCII2NC_OUTPUT_DIR}/spc.lsr.{valid?fmt=%Y%m%d%H?shift=-24H}_{valid?fmt=%Y%m%d%H}.nc
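With `ASCII2NC_INPUT_FORMAT = python`, ascii2nc does not parse the CSV itself; it runs the script named in `ASCII2NC_INPUT_TEMPLATE` (here `read_ascii_storm.py`) and reads the observations from a list called `point_data` that the script must define, one 11-column MET point-observation record per report. The sketch below is a minimal illustration of that contract only, not the operational `read_ascii_storm.py`; the CSV column names (`Valid`, `Lat`, `Lon`, `Fscale`) and the placeholder metadata are assumptions.

```python
# Minimal sketch of a python-embedding reader for "ascii2nc -format python".
# NOT the operational read_ascii_storm.py: the CSV column names and the
# placeholder metadata below are assumptions. The real requirement is the
# contract with ascii2nc: define a list named point_data holding 11-column
# MET point-obs records [msg_typ, sid, vld, lat, lon, elv, var, lvl, hgt, qc, obs].
import sys
import pandas as pd

csv_file = sys.argv[1]          # path supplied via ASCII2NC_INPUT_TEMPLATE
reports = pd.read_csv(csv_file)

point_data = []
for _, rpt in reports.iterrows():
    point_data.append([
        "ADPSFC",               # message type (placeholder)
        "SPC_LSR",              # station ID (placeholder)
        str(rpt["Valid"]),      # valid time, YYYYMMDD_HHMMSS (assumed column)
        float(rpt["Lat"]),      # latitude in degrees north
        float(rpt["Lon"]),      # longitude in degrees east
        0.0,                    # elevation (unused here)
        "Fscale",               # variable name; must match POINT2GRID_INPUT_FIELD
        -9999.0,                # level (missing)
        -9999.0,                # height (missing)
        "NA",                   # QC string
        float(rpt["Fscale"]),   # observation value (assumed column)
    ])
```

The variable name written into each record ("Fscale") is what Point2Grid later selects with `POINT2GRID_INPUT_FIELD` in step 2.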
# Step 2: POINT2GRID

# Time relative to valid time (in seconds if no units are specified) to allow files to be
# considered valid. Set both BEGIN and END to 0 to require the exact time in the filename.
# Not used in this example.
POINT2GRID_FILE_WINDOW_BEGIN = 0
POINT2GRID_FILE_WINDOW_END = 0

POINT2GRID_INPUT_FIELD = Fscale
POINT2GRID_INPUT_LEVEL =

POINT2GRID_QC_FLAGS = 0
POINT2GRID_ADP =

POINT2GRID_REGRID_METHOD = MAX
POINT2GRID_REGRID_TO_GRID = {ENV[VERIF_GRID]}
POINT2GRID_GAUSSIAN_DX = {ENV[VERIF_GRID_DX]}
POINT2GRID_GAUSSIAN_RADIUS = {ENV[GAUSS_RAD]}

POINT2GRID_PROB_CAT_THRESH =
POINT2GRID_VLD_THRESH =

POINT2GRID_INPUT_DIR = {ASCII2NC_OUTPUT_DIR}
POINT2GRID_INPUT_TEMPLATE = {ASCII2NC_OUTPUT_TEMPLATE}

POINT2GRID_OUTPUT_DIR = {OUTPUT_BASE}/point2grid/
POINT2GRID_OUTPUT_TEMPLATE = {POINT2GRID_OUTPUT_DIR}/spc.lsr.{valid?fmt=%Y%m%d%H?shift=-24H}_{valid?fmt=%Y%m%d%H}.{ENV[VERIF_GRID]}.nc

# Step 3: RegridDataPlane

OBS_REGRID_DATA_PLANE_RUN = True

REGRID_DATA_PLANE_SKIP_IF_OUTPUT_EXISTS = False
REGRID_DATA_PLANE_ONCE_PER_FIELD = False

REGRID_DATA_PLANE_METHOD = MAXGAUSS
REGRID_DATA_PLANE_WIDTH = 1
REGRID_DATA_PLANE_VERIF_GRID = {ENV[VERIF_GRID]}
REGRID_DATA_PLANE_GAUSSIAN_DX = {ENV[VERIF_GRID_DX]}
REGRID_DATA_PLANE_GAUSSIAN_RADIUS = {ENV[GAUSS_RAD]}

OBS_REGRID_DATA_PLANE_VAR1_INPUT_FIELD_NAME = Fscale_mask
OBS_REGRID_DATA_PLANE_VAR1_INPUT_LEVEL = "(*,*)"
OBS_REGRID_DATA_PLANE_VAR1_OUTPUT_FIELD_NAME = LSR_PPF

OBS_REGRID_DATA_PLANE_INPUT_DIR = {POINT2GRID_OUTPUT_DIR}
OBS_REGRID_DATA_PLANE_INPUT_TEMPLATE = {POINT2GRID_OUTPUT_TEMPLATE}

OBS_REGRID_DATA_PLANE_OUTPUT_DIR = {OUTPUT_BASE}/practically_perfect/
OBS_REGRID_DATA_PLANE_OUTPUT_TEMPLATE = {OBS_REGRID_DATA_PLANE_OUTPUT_DIR}/spc.ppf.{valid?fmt=%Y%m%d%H?shift=-24H}_{valid?fmt=%Y%m%d%H}.{ENV[VERIF_GRID]}.nc
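Step 2 places the point reports onto `{ENV[VERIF_GRID]}` with maximum-value regridding, and step 3's `MAXGAUSS` method spreads the resulting 0/1 `Fscale_mask` with a Gaussian kernel (controlled by `GAUSS_RAD` and `VERIF_GRID_DX`) to produce the smooth `LSR_PPF` practically-perfect field. The SciPy sketch below only illustrates that smoothing idea; the sigma-to-radius mapping and the example grid spacing are assumptions, and MET's regrid_data_plane is the authoritative implementation.

```python
# Illustration only: mimic the Gaussian spreading that turns a gridded 0/1
# LSR mask (Fscale_mask) into a smooth practically-perfect field (LSR_PPF).
# The sigma-to-radius mapping is an assumption made for demonstration;
# MET's MAXGAUSS regridding is the authoritative implementation.
import numpy as np
from scipy.ndimage import gaussian_filter

def smooth_lsr_mask(mask: np.ndarray, dx_km: float, radius_km: float) -> np.ndarray:
    """Spread a binary report mask with a Gaussian kernel on the same grid."""
    sigma_gridpoints = radius_km / dx_km   # assumed: smoothing radius expressed in grid points
    return gaussian_filter(mask.astype(float), sigma=sigma_gridpoints)

# Example: one report near the center of a small grid with ~81 km spacing (illustrative values)
mask = np.zeros((60, 60))
mask[30, 30] = 1.0
ppf = smooth_lsr_mask(mask, dx_km=81.0, radius_km=120.0)
print(ppf.max(), ppf[30, 30])   # the peak sits at the report location
```

The smoothed field peaks where reports cluster and decays with distance, which is what the `LSR_PPF` output in `{OUTPUT_BASE}/practically_perfect/` represents on the verification grid.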