#! /usr/bin/env python

##@namespace scripts.exhwrf_forecast
# Runs the HWRF forecast based on provided input data, producing
# native output suitable for the HWRF post-processing suite.  This
# script runs either the coupled or uncoupled forecast job.  Which is
# chosen is based on the ocean status file.  If ocean is enabled, and
# ocean init succeeded, a coupled run is attempted.  Otherwise, an
# uncoupled run is made.

import os, sys, re
import produtil.setup, produtil.fileop, produtil.log, produtil.cluster
import hwrf.mpipomtc, hwrf_wcoss, hwrf_expt
import hwrf_alerts

def set_vars(coupling,logger,wave,ocean,wrf_ranks):
    """!Sets MPI tuning variables for the local cluster."""
    if produtil.cluster.name() in ['gyre','tide']:
        hwrf_wcoss.set_vars_for_coupled_hwrf(logger,wave,ocean,wrf_ranks)
    elif produtil.cluster.name() in ['luna','surge']:
        hwrf_wcoss.cray_fcst_vars(logger,wave,ocean,wrf_ranks)
        hwrf_expt.runwrf.run_exe_callback(hwrf_wcoss.cray_run_exe_callback)
    else:
        logger.info('Not on WCOSS, so not setting WCOSS-specific vars.')

def doit():
    """!Main program for this script: runs the forecast."""
    produtil.setup.setup()
    hwrf_expt.init_module(preload=hwrf_expt.argv_preload)
    hwrf_expt.conf.add_fallback_callback(hwrf_alerts.fallback_callback)
    conf=hwrf_expt.conf
    logger=hwrf_expt.conf.log('exhwrf_forecast')
    ocean_flag=conf.getbool('config','run_ocean')
    ocean_model=conf.getstr('config','ocean_model')
    wave_flag=conf.getbool('config','run_wave')
    wave_model=conf.getstr('config','wave_model')
    wrf_compute_ranks=conf.getint('runwrf','wrf_compute_ranks',0)
    if not wrf_compute_ranks:
        wrf_compute_ranks=conf.getint('runwrf','wrf_ranks')
    run_wave=wave_flag
    run_ocean=ocean_flag

    # Decide whether wave coupling can be used.
    if wave_flag:
        wave_success=hwrf_expt.wvstatus.get(logger)
        if wave_model=='WW3' and not hwrf_expt.ww3init.is_completed():
            logger.warning('The ww3init completion flag is off. '
                           'Wave init failed.')
            wave_success=False
        if not wave_success:
            basin1=conf.syndat.basin1
            if basin1 in hwrf_expt.non_wave_basins:
                produtil.log.postmsg(
                    'Cannot run wave in this basin- run uncoupled.')
                set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)
                hwrf_expt.runwrf.remove_wave()
            elif conf.fallback('no_wave','Wave initialization failed. '
                               'Will disable wave coupling.'):
                logger.critical(
                    'CRITICAL FAILURE: HWRF wave init failed, but '
                    'fallbacks are enabled. Running uncoupled.')
                set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)
                hwrf_expt.runwrf.remove_wave()
            else:
                logger.critical(
                    'CRITICAL FAILURE: HWRF wave init failed, and '
                    'fallbacks are disabled. Aborting.')
                sys.exit(1)
        else:
            produtil.log.postmsg('Wave init succeeded. Running coupled.')
            #set_vars(True,logger,run_wave,run_ocean,wrf_compute_ranks)
    else:
        produtil.log.postmsg('Wave is disabled. Running uncoupled.')
        #set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)

    # Decide whether ocean coupling can be used.
    if ocean_flag:
        ocean_success=hwrf_expt.ocstatus.get(logger)
        if ocean_model=='POM' and not hwrf_expt.pominit.is_completed():
            logger.warning('The pominit completion flag is off. '
                           'Ocean init failed.')
            ocean_success=False
        if ocean_model=='HYCOM' and not hwrf_expt.hycominit.is_completed():
            logger.warning('The hycominit completion flag is off. '
                           'Ocean init failed.')
            #ocean_success=False
        if not ocean_success:
            basin1=conf.syndat.basin1
            if basin1 in hwrf_expt.non_ocean_basins:
                produtil.log.postmsg(
                    'Cannot run ocean in this basin- run uncoupled.')
                set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)
                hwrf_expt.runwrf.remove_ocean()
            elif conf.fallback('no_ocean','Ocean initialization failed. '
                               'Will disable ocean coupling.'):
                logger.critical(
                    'CRITICAL FAILURE: HWRF ocean init failed, but '
                    'fallbacks are enabled. Running uncoupled.')
                set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)
                hwrf_expt.runwrf.remove_ocean()
            else:
                logger.critical(
                    'CRITICAL FAILURE: HWRF ocean init failed, and '
                    'fallbacks are disabled. Aborting.')
                sys.exit(1)
        else:
            produtil.log.postmsg('Ocean init succeeded. Running coupled.')
            set_vars(True,logger,run_wave,run_ocean,wrf_compute_ranks)
    else:
        produtil.log.postmsg('Ocean is disabled. Running uncoupled.')
        set_vars(False,logger,run_wave,run_ocean,wrf_compute_ranks)

    # Run the forecast.  On failure, check the coupler log to decide
    # whether the coupler aborted (GLOB_ABORT) or the forecast itself failed.
    try:
        hwrf_expt.runwrf.run()
    except:
        cplout=hwrf_expt.runwrf.confstrinterp('{coupled_log}')
        if os.path.exists(cplout):
            # This is a coupled run.
            cplfailed=False
            with open(cplout,'r') as f:
                for line in f:
                    if re.search(r'GLOB_ABORT: C:',line):
                        cplfailed=True
                        break
            if cplfailed:
                produtil.fileop.deliver_file(
                    cplout,cplout+'_failed',keep=False,logger=logger)
                logger.critical(
                    'Coupled forecast job failed due to GLOB_ABORT '
                    'from coupler.\n %s'%(line))
                msg=('CRITICAL FAILURE: Coupled forecast job failed due to '
                     'coupler error. Aborting. Please fall back to the '
                     'uncoupled atmosphere standalone forecast job. See the '
                     'Special Procedures wiki page for details.')
                logger.critical(msg)
                raise Exception(msg)
                sys.exit(1)
            else:
                msg='CRITICAL FAILURE: Coupled forecast job failed. Aborting.'
                logger.critical(msg)
                raise Exception(msg)
                sys.exit(1)
        else:
            # This is an uncoupled run.
            msg='CRITICAL FAILURE: Uncoupled forecast job failed. Aborting.'
            logger.critical(msg)
            raise Exception(msg)
            sys.exit(1)
    produtil.log.postmsg('Forecast complete.')

# Multistorm
# THIS FUNCTION IS NOT USED and can technically be deleted.
# It computes the total number of MPI tasks (compute plus I/O), which
# changes with the number of storms, and exports it as TOTAL_TASKS.
def set_total_tasks(conf):
    num_storms=len(conf.getstr('config','multistorm_sids','nosids').split(','))
    num_domains=num_storms*2+1
    nio_g=int(conf.getstr('runwrf','nio_groups'))
    nio_tpg=conf.getstr('runwrf','nio_tasks_per_group').split(',')
    npx=int(conf.getstr('runwrf_namelist','dm_task_split.nest_pes_x').split(',')[0])
    npy=int(conf.getstr('runwrf_namelist','dm_task_split.nest_pes_y').split(',')[0])
    num_computetasks=npx*npy
    num_iotasks=0
    for index in range(num_domains):
        num_iotasks+=nio_g*int(nio_tpg[index])
    total_tasks=num_computetasks + num_iotasks
    os.environ["TOTAL_TASKS"]=str(total_tasks)

if __name__=='__main__':
    doit()