#! /usr/bin/env python

##@namespace ush.hwrf_make_jobs
# Makes the jobs/JHWRF_* sh scripts.
#
# This script is run by EMC and NCO to generate the jobs/JHWRF_*
# files, which are the shell wrappers around the Python ex-scripts.
# The calling convention is quite simple:
# @code{.sh}
#  ush/hwrf_make_jobs.py
# @endcode
#
# It is typically called from the directory above ush/ and jobs/,
# though it locates the jobs/JHWRF.in file relative to its own path.
# It reads in jobs/JHWRF.in and parses it repeatedly, once per output
# jobs/JHWRF_* file.  Each time through, it generates a dict of
# variables (job name, etc.) to send to a produtil.atparse.ATParser,
# which generates the final job script.

import os, sys, io, logging
import produtil.setup, produtil.atparse
from os.path import join, dirname

def job(JJOBNAME,JOBMORE='',EXNAME=None,**kwargs):
    """!Makes a dict to pass to the ATParser to generate a job with
    the given specifications.
    @param JJOBNAME the JJOB's name: the part after "JHWRF_"
    @param JOBMORE optional job mode.  Sets the JOBMORE variable and,
      if non-empty, the ".mode.*" filename suffix for jobs that are
      run in multiple different ways.
    @param EXNAME ex-script name (the part between exhwrf_ and .py).
      Set automatically from the JJOBNAME if absent.
    @param kwargs inserted into the resulting dict via "update"
    @returns the new dict"""
    if EXNAME is None: EXNAME=JJOBNAME.lower()
    out=dict(JJOBNAME=str(JJOBNAME), JOBMORE=str(JOBMORE),
             EXNAME=str(EXNAME))
    out.update(PARQ='devmax2',SHAREQ='devmax2_shared')
    out.update(kwargs)
    return out

def make_job(jd,lines,logger):
    """!Makes one J-Job by parsing the given lines using an ATParser.
    @param jd a dict to pass to the ATParser
    @param lines an array of lines from JHWRF.in
    @param logger where to send errors
    @returns a string containing whatever should be in the job file"""
    sio=io.StringIO()
    ap=produtil.atparse.ATParser(sio,jd,logger)
    for i,line in enumerate(lines,1):
        ap.parse_line(line,'JHWRF.in',i)
    out=sio.getvalue()
    sio.close()
    return out

def main():
    """!Main program.  Loops over all known job names, producing the
    resulting job file for each inside the jobs/ directory."""
    produtil.setup.setup()
    logger=logging.getLogger('hwrf_make_jobs')

    # List of jobs to create:
    jobs = [ job('GSI'), job('BUFRPREP'), job('ENSDA_RELOCATE_PRE'),
             job('ENSDA'), job('ENSDA_OUTPUT'), job('ENSDA_PRE'),
             job('FORECAST'), job('GSI_POST'), job('WAVE_INIT'),
             job('WAVE_POST'), job('INIT'), job('LAUNCH'),
             job('OUTPUT'), job('UNPOST'), job('MERGE'),
             job('RELOCATE'), job('OCEAN_INIT'), job('POST'),
             job('PRODUCTS'), job('CLEANUP'), job('HYCOM_POST'),
             job('ENSDA_RELOCATE'), job('MEAN_HX'), job('ENSHX'),
             job('ENKF') ]

    # Find the jobs/ directory relative to this script's location:
    hwrf_make_jobs_py=os.path.realpath(__file__)
    HOMEhwrf=dirname(dirname(hwrf_make_jobs_py))
    JOBhwrf=join(HOMEhwrf,'jobs')
    JHWRF_in_path=join(JOBhwrf,'JHWRF.in')

    # Read the JHWRF.in file:
    try:
        with open(JHWRF_in_path,'rt') as jhwrf_in_file:
            jhwrf_in=jhwrf_in_file.readlines()
    except EnvironmentError as e:
        logger.error('%s: %s'%(JHWRF_in_path,str(e)),exc_info=True)
        sys.exit(1)

    # Make the jobs:
    for jd in jobs:
        filename=os.path.join(JOBhwrf,'JHWRF_'+jd['JJOBNAME'].upper())
        if 'JOBMORE' in jd and jd['JOBMORE']:
            # job is run in multiple different ways:
            filename+='.mode.'+jd['JOBMORE']
        contents=make_job(jd,jhwrf_in,logger)
        logger.info('%s: write file'%(filename,))
        with open(filename,'wt') as outf:
            outf.write(contents)
        # Make the generated job script executable (u+x, g+x, o+x):
        s=os.stat(filename)
        os.chmod(filename,s.st_mode | 0o111)

if __name__=='__main__':
    main()
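
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the production script): how a dict
# from job() drives the substitution in make_job().  This assumes
# jobs/JHWRF.in uses the @[VARNAME] token syntax understood by
# produtil.atparse.ATParser; the template line below is hypothetical.
#
#   jd = job('POST')
#   # jd == {'JJOBNAME':'POST', 'JOBMORE':'', 'EXNAME':'post',
#   #        'PARQ':'devmax2', 'SHAREQ':'devmax2_shared'}
#   text = make_job(jd, ['exec $USHhwrf/exhwrf_@[EXNAME].py\n'], logger)
#   # text == 'exec $USHhwrf/exhwrf_post.py\n'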