/**********************************************************************
 * TDRP params for MdvMerge2
 **********************************************************************/

//======================================================================
//
// MdvMerge2 takes MDV data from a number of separate input directories
// and merges them into a single MDV file as a mosaic.
//
//======================================================================

//======================================================================
//
// DEBUGGING AND PROCESS CONTROL.
//
//======================================================================

///////////// debug ///////////////////////////////////
//
// Debug option.
//
// If set, debug messages will be printed with the appropriate level of
// detail.
//
//
// Type: enum
// Options:
//     DEBUG_OFF
//     DEBUG_NORM
//     DEBUG_VERBOSE
//

debug = DEBUG_NORM;

///////////// instance ////////////////////////////////
//
// Process instance.
//
// Used for registration with procmap.
//
//
// Type: string
//

instance = "gfs_0.25";

///////////// procmap_register_interval_secs //////////
//
// Interval for procmap registration.
//
// Set to a high value if processing takes a long time. This will
// prevent the auto_restarter from killing and restarting the app. If
// this value is less than or equal to the PROCMAP_REGISTER_INTERVAL
// (60), then that value will be used.
//
// Minimum val: 60
//
// Type: int
//

procmap_register_interval_secs = 420;

//======================================================================
//
// OPERATIONAL MODE AND TRIGGERING.
//
//======================================================================

///////////// mode ////////////////////////////////////
//
// Operation mode.
//
// Program may be run in two modes, ARCHIVE and REALTIME.
//
// In REALTIME mode, the analysis is performed at regular times or as
// new data arrives.
//
// In ARCHIVE mode, the file list is obtained from the command line, or
// a start and end time are specified.
//
//
// Type: enum
// Options:
//     ARCHIVE
//     REALTIME
//

mode = REALTIME;

///////////// trigger /////////////////////////////////
//
// Trigger mechanism.
//
// TIME_TRIGGER: the program is triggered at constant time intervals -
// see time_trigger_interval.
//
// FILE_TRIGGER: the program triggers when all the URLs that must update
// have updated, the requisite number of URLs have updated, and the
// required minimum time between triggers has elapsed.
//
// FCST_FILES_TRIGGER: the program triggers when all lead times listed
// in fcstLeadTimes[] are available in the fcst_file_trigger_url, and
// then attempts to process these lead times for the input_urls.
//
//
// Type: enum
// Options:
//     TIME_TRIGGER
//     FILE_TRIGGER
//     FCST_FILES_TRIGGER
//

trigger = FCST_FILES_TRIGGER;

///////////// fcst_file_trigger_url ///////////////////
//
// Trigger URL.
//
// URL for triggering in latest_data mode.
//
//
// Type: string
//

fcst_file_trigger_url = "mdvp:://localhost::$(GFS_0.25A)";

///////////// fcstLeadTimes ///////////////////////////
//
// Forecast lead times, in seconds, that are available in the forecast
// directory input URL.
//
// Only used if trigger is set to FCST_FILES_TRIGGER.
//
//
// Type: double
// 1D array - variable length.
//

fcstLeadTimes = { 0, 10800, 21600, 32400, 43200 };
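// Illustrative alternative (not part of the configured defaults): to
// run on a fixed schedule instead of waiting for forecast files, the
// trigger could be switched to TIME_TRIGGER and controlled by the
// time_trigger_* parameters documented below. The interval value shown
// here is hypothetical.
//
// trigger = TIME_TRIGGER;
// time_trigger_interval = 3600;  // hypothetical: trigger once per hour
// time_trigger_offset = 0;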
///////////// time_trigger_interval ///////////////////
//
// Interval for TIME_TRIGGER - secs.
//
// For TIME_TRIGGER, this is the interval between trigger events. Time
// triggers are synchronized to the hour, and occur at constant
// intervals thereafter.
//
// Minimum val: 1
//
// Type: int
//

time_trigger_interval = 300;

///////////// time_trigger_offset /////////////////////
//
// Time trigger offset - secs.
//
// Time triggers occur on the hour and at constant intervals thereafter
// until the next hour. This parameter allows you to schedule the
// trigger to be offset from the anchored time by the specified number
// of secs. For example, if you set time_trigger_interval to 600 (10
// mins) and time_trigger_offset to 420 (7 mins), the trigger will
// occur at 7, 17, 27, 37, 47 and 57 mins after the hour.
//
//
// Type: int
//

time_trigger_offset = 0;

///////////// time_trigger_margin /////////////////////
//
// Max time diff for searching for files relative to the trigger time -
// secs.
//
// When matching files up with the trigger time, the difference between
// the trigger time and the file time must be less than this margin.
//
//
// Type: int
//

time_trigger_margin = 43200;

///////////// past_only ///////////////////////////////
//
// Only search in the past when matching files with the trigger time.
//
// If TRUE, the search is forced to look for files before the trigger
// time. This is helpful when running in archive mode and you don't want
// to get data that has a future time stamp.
//
//
// Type: boolean
//

past_only = FALSE;

///////////// min_time_between_file_triggers //////////
//
// Minimum time between triggering in REALTIME FILE_TRIGGER mode, in
// seconds.
//
// This prevents the arrival of files in FILE_TRIGGER mode from
// triggering at intervals less than this amount.
//
//
// Type: int
//

min_time_between_file_triggers = 120;

///////////// min_number_updated_urls /////////////////
//
// Minimum number of URLs that must update for a trigger.
//
//
// Type: int
//

min_number_updated_urls = 1;

///////////// max_realtime_valid_age //////////////////
//
// Maximum file age in REALTIME file-triggered mode, in seconds.
//
// Applies to all URLs specified.
//
//
// Type: int
//

max_realtime_valid_age = 43600;

///////////// sleepAfterTrigger ///////////////////////
//
// Number of seconds to sleep after triggering before processing the
// data. The intent is to allow realtime URLs that come in nearly
// simultaneously (e.g. satellite) a chance to catch up.
//
// Only applies in REALTIME mode.
//
//
// Type: int
//

sleepAfterTrigger = 120;

//======================================================================
//
// FIELD SPECIFICATIONS.
//
//======================================================================

///////////// vlevelLimits ////////////////////////////
//
// Option to apply vlevel limits prior to reading MDV data.
//
// Default is not to do this. If this option is selected, only the
// selected levels are included in the MDV read, which can be handy when
// making a composite of 0.5 degree scans from a multi-layered dataset.
//
//
// Type: struct
//   typedef struct {
//      boolean applyVlevelLimits;
//      double vlevelMin;
//      double vlevelMax;
//   }
//
//

vlevelLimits = {
    applyVlevelLimits = FALSE,
    vlevelMin = 0.45,
    vlevelMax = 0.55
};
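// Illustrative sketch (not the configured default): to merge only the
// lowest (roughly 0.5 degree) tilt from multi-layered radar volumes,
// the limits might be enabled along these lines; the exact bounds are
// hypothetical and depend on the vlevels present in the input data.
//
// vlevelLimits = {
//     applyVlevelLimits = TRUE,
//     vlevelMin = 0.4,
//     vlevelMax = 0.6
// };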
///////////// planeNumLimits //////////////////////////
//
// Option to apply inclusive vertical plane number limits prior to
// reading MDV data. The lowest plane is 0.
//
// Default is not to do this. If this option is selected, only the
// selected plane numbers are included in the MDV read. This can be
// handy when the desired output is a merge of the lowest elevation
// angle from a multi-layered dataset.
//
//
// Type: struct
//   typedef struct {
//      boolean applyPlaneNumLimits;
//      int planeNumMin;
//      int planeNumMax;
//   }
//
//

planeNumLimits = {
    applyPlaneNumLimits = FALSE,
    planeNumMin = 0,
    planeNumMax = 0
};

///////////// use_specified_vlevels ///////////////////
//
// Option to output unevenly-spaced vertical levels.
//
// Created to handle meteorological data sets with denser vertical
// spacing near the surface. If TRUE, you must set the 'vlevel_array[]'
// values.
//
//
// Type: boolean
//

use_specified_vlevels = FALSE;

///////////// vlevel_array ////////////////////////////
//
// vlevel values associated with 'use_specified_vlevels'.
//
// Only used when 'use_specified_vlevels' is TRUE.
//
//
// Type: double
// 1D array - variable length.
//

vlevel_array = { 0 };

///////////// merge_fields ////////////////////////////
//
// Specifications for the merged output fields.
//
// 'name': specify the name of this field in the output MDV file. This
// is also probably the name of the field in the input files. However,
// you may specify a different field name for the input - see input_urls
// for details.
//
// 'merge_method': MERGE_MIN - take the minimum value at a grid point;
// MERGE_MAX - take the maximum value at a grid point; MERGE_MEAN - take
// the mean value at a point; MERGE_SUM - sum all values at a point;
// MERGE_LATEST - take the latest valid data value at a grid point;
// MERGE_CLOSEST - use the data with the closest origin to the grid
// point. MERGE_CLOSEST requires a range field in the data so that, for
// overlapping points, we can determine which data set is closest to the
// grid point.
//
// 'merge_encoding': the type of data used to perform the merge. For
// best results FLOAT32 is recommended, especially for MERGE_SUM.
// However, this uses more memory. To conserve memory, use INT8 or
// INT16. If you select INT8 or INT16 you must also specify
// 'merge_scale' and 'merge_bias' to be used to represent the data in
// the merge.
//
// 'output_encoding': determines the type for storing the merged data in
// the output file. This affects the size of the output file. Use
// FLOAT32 for maximum precision, INT8 for the smallest files.
//
//
// Type: struct
//   typedef struct {
//      string name;
//      merge_method_t merge_method;
//        Options:
//          MERGE_MIN
//          MERGE_MAX
//          MERGE_MEAN
//          MERGE_SUM
//          MERGE_LATEST
//          MERGE_CLOSEST
//      encoding_t merge_encoding;
//        Options:
//          INT8
//          INT16
//          FLOAT32
//      double merge_scale;
//      double merge_bias;
//      encoding_t output_encoding;
//        Options:
//          INT8
//          INT16
//          FLOAT32
//   }
//
// 1D array - variable length.
//

merge_fields = {
  {
    name = "TMP_ISBL",
    merge_method = MERGE_LATEST,
    merge_encoding = FLOAT32,
    merge_scale = 0.5,
    merge_bias = -32,
    output_encoding = FLOAT32
  }
  ,
  {
    name = "HGT_ISBL",
    merge_method = MERGE_LATEST,
    merge_encoding = FLOAT32,
    merge_scale = 0.5,
    merge_bias = -32,
    output_encoding = FLOAT32
  }
//  ,
//  {
//    name = "UGRD_ISBL",
//    merge_method = MERGE_LATEST,
//    merge_encoding = FLOAT32,
//    merge_scale = 0.5,
//    merge_bias = -32,
//    output_encoding = FLOAT32
//  }
//  ,
//  {
//    name = "VGRD_ISBL",
//    merge_method = MERGE_LATEST,
//    merge_encoding = FLOAT32,
//    merge_scale = 0.5,
//    merge_bias = -32,
//    output_encoding = FLOAT32
//  }
};
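// Illustrative sketch (not one of the entries above; the field name is
// hypothetical): if memory were a concern, a field could be merged and
// written as INT8, in which case merge_scale and merge_bias must be
// supplied to represent the data, as described in the comment block
// above.
//
// {
//   name = "DBZ",
//   merge_method = MERGE_MAX,
//   merge_encoding = INT8,
//   merge_scale = 0.5,
//   merge_bias = -32,
//   output_encoding = INT8
// }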
///////////// range_field_name ////////////////////////
//
// Name of the range field in the data set, giving the range from the
// instrument to the grid point.
//
// If MERGE_CLOSEST is used, we require a range field, from which to
// determine which data set origin is closest to the grid point. This is
// mostly used for radar data, and the range will be the range from the
// radar.
//
//
// Type: string
//

range_field_name = "range";

///////////// fcst_data_time_type /////////////////////
//
// This parameter defines what time to put in the time array when using
// MERGE_LATEST. If one or more of the inputs is forecast data, this
// gives the option to use its gen time or its valid time. The default
// is valid time, so that the algorithm behaves the same as it did
// before this parameter was added.
//
// If set to GENTIME, the forecast_delta is subtracted from the forecast
// time (valid time) and the result is used in the time array. If set to
// VALIDTIME, the forecast time (valid time) is used in the time array.
// This option was added for cases where you want the non-forecast data
// to take priority over the forecast data.
//
//
// Type: enum
// Options:
//     GENTIME
//     VALIDTIME
//

fcst_data_time_type = GENTIME;

//======================================================================
//
// INPUT DATA SPECIFICATIONS.
//
//======================================================================

///////////// input_urls //////////////////////////////
//
// Array of input data URLs and related information.
//
// 'url': specify the URL for the input data. For data on local disk,
// the URL can just be the directory containing the input MDV data
// files. For data retrieved from a server, it is the URL for the
// server. A server URL takes the following form:
// 'mdvp:://host:port:directory_path'. The directory path is relative to
// $DATA_DIR or $RAP_DATA_DIR.
//
// 'field_names': if the field names in the input file match those in
// the 'merge_fields' parameter, you should set field_names to an empty
// string. If the field names differ, specify the input field names as a
// comma-delimited list. If you wish to specify field numbers instead of
// names, use a comma-delimited list of numbers. Do not mix names and
// numbers.
//
// 'is_required': if true, the merge will only be performed if data from
// this URL is available. If false, the merge will go ahead even if no
// data is available from this URL.
//
// 'must_update': if true, data from this URL must have updated since
// the previous merge before the merge can go ahead.
//
//
// Type: struct
//   typedef struct {
//      string url;
//      string field_names;
//      boolean is_required;
//      boolean must_update;
//   }
//
// 1D array - variable length.
//

input_urls = {
  {
    url = "mdvp:://localhost::$(GFS_0.25A)",
    field_names = "",
    is_required = TRUE,
    must_update = FALSE
  }
  ,
  {
    url = "mdvp:://localhost::EpochOps/mdv/model/gfs_0.25b",
    field_names = "",
    is_required = TRUE,
    must_update = FALSE
  }
};
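// Illustrative sketch (hypothetical URL and field names): if an input
// source stored its fields under names that differ from the
// merge_fields entries above, field_names would carry the input names
// as a comma-delimited list, presumably in the same order as the
// merge_fields entries.
//
// {
//   url = "mdvp:://localhost::example/dir/other_model",
//   field_names = "TMP, HGT",
//   is_required = FALSE,
//   must_update = FALSE
// }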
//======================================================================
//
// OUTPUT DATA SPECIFICATIONS.
//
//======================================================================

///////////// output_url //////////////////////////////
//
// Output data URL.
//
// URL for output data files. Has the form
// mdvp:://host:port:directory_path.
//
//
// Type: string
//

output_url = "mdvp:://localhost::$(MERGED_GFS)";

///////////// output_timestamp_correction /////////////
//
// Correction to the output time relative to the trigger time - secs.
//
// You may need to correct for latency in the data system. For example,
// the NEXRAD data arrives about 5 mins late, so we need to correct the
// trigger time to account for this latency in the data. The correction
// is ADDED to the trigger time, so to correct for latency you need to
// use a negative correction.
//
//
// Type: int
//

output_timestamp_correction = 0;

///////////// write_ldata /////////////////////////////
//
// Write Ldata info file, when not in REALTIME mode.
//
//
// Type: boolean
//

write_ldata = TRUE;

///////////// write_as_forecast ///////////////////////
//
// Set to true to write as forecast data (g_HHMMSS/f_00000000.mdv).
// Otherwise, output will just be written to a day dir.
//
// Defaults to FALSE.
//
//
// Type: boolean
//

write_as_forecast = TRUE;

///////////// output_projection ///////////////////////
//
// Projection of output grid.
//
// FLAT - (x,y) Cartesian data in km from a given origin, typical of
// local radar data.
//
// LATLON - lat/lon grid with constant grid spacing (Mercator).
//
// LAMBERT - Lambert Conformal projection.
//
//
// Type: enum
// Options:
//     OUTPUT_PROJ_FLAT
//     OUTPUT_PROJ_LATLON
//     OUTPUT_PROJ_LAMBERT
//

output_projection = OUTPUT_PROJ_LATLON;

///////////// output_origin ///////////////////////////
//
// Origin of output grid.
//
// Applies to OUTPUT_PROJ_FLAT and OUTPUT_PROJ_LAMBERT.
//
//
// Type: struct
//   typedef struct {
//      double lat;
//      double lon;
//   }
//
//

output_origin = {
    lat = -90,
    lon = 0
};

///////////// output_rotation /////////////////////////
//
// Rotation of output grid.
//
// Applicable to OUTPUT_PROJ_FLAT.
//
//
// Type: double
//

output_rotation = 0;

///////////// output_std_parallels ////////////////////
//
// Standard parallels.
//
// Applicable to OUTPUT_PROJ_LAMBERT only.
//
//
// Type: struct
//   typedef struct {
//      double lat_1;
//      double lat_2;
//   }
//
//

output_std_parallels = {
    lat_1 = 20,
    lat_2 = 45
};

///////////// output_grid /////////////////////////////
//
// Output grid parameters.
//
// For OUTPUT_PROJ_LATLON, minx, miny, dx and dy are in degrees.
// Otherwise they are in km.
//
//
// Type: struct
//   typedef struct {
//      long nx;
//      long ny;
//      long nz;
//      double minx;
//      double miny;
//      double minz;
//      double dx;
//      double dy;
//      double dz;
//   }
//
//

output_grid = {
    nx = 1440,
    ny = 721,
    nz = 21,
    minx = 0,
    miny = -90,
    minz = 875,
    dx = 0.25,
    dy = 0.25,
    dz = -50
};
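// A quick sanity check on the grid above (comment only, not generated
// by TDRP): with dx = dy = 0.25 degrees, nx = 1440 covers the full
// 360 degrees of longitude starting at minx = 0, and ny = 721 covers
// latitudes from miny = -90 up to -90 + 720 * 0.25 = +90.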
///////////// output_compression //////////////////////
//
// Compression method for output fields.
//
//
// Type: enum
// Options:
//     COMPRESSION_NONE
//     COMPRESSION_RLE
//     COMPRESSION_ZLIB
//     COMPRESSION_GZIP
//

output_compression = COMPRESSION_ZLIB;

///////////// output_data_set_name ////////////////////
//
// Data set name.
//
// This is placed in the MDV master header for documentation purposes.
//
//
// Type: string
//

output_data_set_name = "gfs_0.25";

///////////// output_data_set_source //////////////////
//
// Data set source details.
//
// This is placed in the MDV master header for documentation purposes.
//
//
// Type: string
//

output_data_set_source = "gfs_0.25a and gfs_0.25b.";

///////////// copyMetaDataFromInput ///////////////////
//
// Option to copy the data set name, source and info strings from the
// last master header read.
//
// May be useful for tracking data sets. If set to TRUE, the
// output_data_set_name and output_data_set_source entries in this param
// file are ignored.
//
//
// Type: boolean
//

copyMetaDataFromInput = FALSE;

///////////// remap_input_grids ///////////////////////
//
// Option to do remapping on input URLs.
//
// For the vast majority of cases this should be an empty array. The
// default given is simply an example, and unless the URLs given are
// actually used (which is unlikely) it will have no effect.
//
// For lat/lon projections, minx, miny, dx and dy are in degrees.
// Otherwise they are in km. The url string must match one of the input
// URLs specified. The gridType string should be set to one of FLAT,
// LATLON or LAMBERT. originLat and originLon are not used for LATLON
// projections.
//
//
// Type: struct
//   typedef struct {
//      string url;
//      string gridType;
//      long nx;
//      long ny;
//      double minx;
//      double miny;
//      double dx;
//      double dy;
//      double lambertTruelat1;
//      double lambertTruelat2;
//      double flatEarthRotation;
//      double originLat;
//      double originLon;
//   }
//
// 1D array - variable length.
//

remap_input_grids = {
  {
    url = "mdvp:://exampleHost::example/dir/struct1",
    gridType = "FLAT",
    nx = 400,
    ny = 400,
    minx = -199.5,
    miny = -199.5,
    dx = 1,
    dy = 1,
    lambertTruelat1 = 60,
    lambertTruelat2 = 40,
    flatEarthRotation = 0,
    originLat = 40,
    originLon = -105
  }
  ,
  {
    url = "mdvp:://exampleHost::example/dir/struct2",
    gridType = "FLAT",
    nx = 400,
    ny = 400,
    minx = -199.5,
    miny = -199.5,
    dx = 1,
    dy = 1,
    lambertTruelat1 = 60,
    lambertTruelat2 = 40,
    flatEarthRotation = 0,
    originLat = 41,
    originLon = -107
  }
};
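// Illustrative sketch (hypothetical URL): a LATLON remap entry would
// give minx, miny, dx and dy in degrees, and, per the comments above,
// originLat and originLon are ignored for that projection.
//
// {
//   url = "mdvp:://exampleHost::example/dir/latlon_input",
//   gridType = "LATLON",
//   nx = 360,
//   ny = 181,
//   minx = -180,
//   miny = -90,
//   dx = 1,
//   dy = 1,
//   lambertTruelat1 = 0,
//   lambertTruelat2 = 0,
//   flatEarthRotation = 0,
//   originLat = 0,
//   originLon = 0
// }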