#! /usr/bin/env python

##@namespace ush.hnmmb_make_jobs
# Makes the jobs/JHNMMB_* sh scripts.
#
# This script is run by EMC and NCO to generate jobs/JHNMMB_* files,
# which are the shell wrappers around the Python scripts.  The calling
# convention is quite simple:
# @code{.sh}
#   ush/hnmmb_make_jobs.py
# @endcode
#
# It should be called from the directory above ush/ and jobs/.  It
# will read in the jobs/JHNMMB.in file, and parse it repeatedly, once
# per output jobs/JHNMMB_* file.  Each time through, it generates a
# dict of variables (job name, etc.) to send to a
# produtil.atparse.ATParser, which generates the final job script.

import os, sys, io, logging
import produtil.setup, produtil.atparse
from os.path import join,dirname

def job(JJOBNAME,JOBMORE='',EXNAME=None,**kwargs):
    """!Makes a dict to pass to the ATParser to generate a job with the
    given specifications.
    @param JJOBNAME  the JJOB's name: the part after "JHNMMB_"
    @param JOBMORE  unused.  Sets the JOBMORE variable
    @param EXNAME  ex-script name (the part between exhnmmb_ and .py).
               Set automatically from the JJOBNAME if absent.
    @param kwargs  inserted into the resulting dict via "dict.update"
    @returns the new dict"""
    if EXNAME is None: EXNAME=JJOBNAME.lower()
    out=dict(JJOBNAME=str(JJOBNAME),
             JOBMORE=str(JOBMORE),
             EXNAME=str(EXNAME))
    # Default batch queue settings; callers may override via kwargs below.
    out.update(PARQ='devmax2',SHAREQ='devmax2_shared')
    out.update(kwargs)
    return out
    
def make_job(jd,lines,logger):
    """!Parses the given lines with an ATParser to produce one J-Job.
    @param jd  a dict to pass to the atparser
    @param lines  an array of lines from JHNMMB.in
    @param logger  where to send errors
    @returns a string containing whatever should be in the job file"""
    stream=io.StringIO()
    parser=produtil.atparse.ATParser(stream,jd,logger)
    # Feed each input line through the parser, tracking 1-based line
    # numbers so parse errors can point back into JHNMMB.in:
    for lineno,line in enumerate(lines,1):
        parser.parse_line(line,'JHNMMB.in',lineno)
    result=stream.getvalue()
    stream.close()
    return result

def main():
    """!Main program.  Loops over all known job names producing the
    resulting job file for each inside the jobs/ directory.

    Reads jobs/JHNMMB.in once, then renders it once per job dict,
    writing each result to jobs/JHNMMB_<NAME>.  Exits with status 1 if
    the template file cannot be read."""
    produtil.setup.setup()
    logger=logging.getLogger('hnmmb_make_jobs')

    # List of jobs to create:
    jobs = [ job('LAUNCH'), job('NPS'), job('OCEAN_INIT'), job('RELOCATE'),
             job('FORECAST'), job('POST'), job('TRACKER'), job('ARCHIVE') ]

    # Locate jobs/JHNMMB.in relative to this script, which is assumed
    # to live in HOMEhnmmb/ush/:
    hnmmb_make_jobs_py=os.path.realpath(__file__)
    HOMEhnmmb=dirname(dirname(hnmmb_make_jobs_py))
    JOBhnmmb=join(HOMEhnmmb,'jobs')
    JHNMMB_in_path=join(JOBhnmmb,'JHNMMB.in')
    try:
        with open(JHNMMB_in_path,'rt') as jhnmmb_in_file:
            jhnmmb_in=jhnmmb_in_file.readlines()
    except EnvironmentError as e:
        logger.error('%s: %s'%(JHNMMB_in_path,str(e)),exc_info=True)
        sys.exit(1)

    # Make the jobs:
    for jd in jobs:
        filename=os.path.join(JOBhnmmb,'JHNMMB_'+jd['JJOBNAME'].upper())
        if jd.get('JOBMORE'):
            # Job is run in multiple different ways; distinguish the
            # output files with a ".mode.<JOBMORE>" suffix:
            filename+='.mode.'+jd['JOBMORE']
        contents=make_job(jd,jhnmmb_in,logger)
        logger.info('%s: write file'%(filename,))
        with open(filename,'wt') as outf:
            outf.write(contents)

if __name__=='__main__': main()