#!/usr/bin/env python

import os, sys, glob, shutil, fileinput, time, re
import produtil.setup, produtil.run, produtil.fileop, produtil.cd
import produtil.batchsystem, produtil.numerics, produtil.config, produtil.dbnalert
from produtil.fileop import remove_file, make_symlink, isnonempty, chdir, fortlink
from produtil.run import run, exe, runstr
from produtil.numerics import to_datetime, to_datetime_rel
import nmmbUsh
from nmmbUsh import logger
import logging
import hmon_vitals

produtil.setup.setup()
logger.info("tracker started")

workdir=os.environ.get('WORKhmon')
parmdir=os.environ.get('PARMhmon')
atcfdir=os.environ.get('COMOUTatcf')
chdir(workdir)

conf=produtil.config.from_file(parmdir + "/hmon.conf")
conf.cycle=os.environ.get('CYCLE')
stormlabel=conf.get('config','stormlabel')
label=stormlabel[5]                 # storm slot number from the storm label
fixdir=conf.get('dir','FIXhmon')
exedir=conf.get('dir','EXEChmon')
scriptdir=conf.get('dir','EXhmon')
comdir=conf.get('dir','COM')
syndat=conf.get('dir','syndat')
cgb=conf.get('exe','COPYGB')

### read storm information produced by the get_storm_info job
with open(workdir + "/get_storm_info/storm_info", 'r') as f:
    for line in f:
        if "START_DATE=" in line:
            cycle = line.replace("START_DATE=", "").strip('\n')
        if "STORM_ID=" in line:
            stormid = line.replace("STORM_ID=", "").strip('\n')
        if "START_DATE06=" in line:
            cycle06 = line.replace("START_DATE06=", "").strip('\n')
        if "STORM_NAME=" in line:
            STORM_NAME = line.replace("STORM_NAME=", "").strip('\n')
        if "STORM_LAT=" in line:
            STORM_LAT = line.replace("STORM_LAT=", "").strip('\n')
        if "STORM_LON=" in line:
            STORM_LON = line.replace("STORM_LON=", "").strip('\n')

### set up a clean run directory for the tracker
runtracker="runtracker"
if os.path.isdir(runtracker):
    shutil.rmtree(runtracker)
produtil.fileop.makedirs(runtracker, logger=logger)
chdir(runtracker)
logger.info(os.getcwd())

MYRUN=scriptdir
STORM_ID=stormid
STORM_NUM=STORM_ID[0:2]
STORM_BASIN=STORM_ID[2:3]
START_DATE=cycle
FLENGTH=126     # forecast length in hours
FINC=3          # tracker time step in hours

CDATE=produtil.numerics.to_datetime(START_DATE)
CC=CDATE.strftime("%C")
YY=CDATE.strftime("%y")
YYYY=CDATE.strftime("%Y")
MM=CDATE.strftime("%m")
DD=CDATE.strftime("%d")
HH=CDATE.strftime("%H")

### run hmon_vitals.py instead of storm_location-vit.py to get storm information from tcvitals
os.environ.update(
    # COMINarch='/gpfs/tp1/nco/ops/com/arch/prod/syndat',   # or gp1 on gyre / tp1 on tide
    # COMINmsg='/gpfs/hps/nco/ops/com/hur/prod/inphwrf',
    # storm_num='1'                   # request storm in slot 1
    COMINarch=syndat,
    COMINmsg=conf.get('dir','COMmsg'),
    storm_num=label                   # request storm in slot 1
)
storm_name=STORM_NAME.lower()
storm_id=STORM_ID.lower()

### get tcvital.as
syndat_file=os.environ['COMINarch'] + "/syndat_tcvitals." + YYYY
logger.info("syndat_file=" + syndat_file)
stormHour = "20" + YY + MM + DD + " " + HH
if os.path.exists("tcvital.as"):
    os.remove("tcvital.as")
lines_seen = set()
with open("tcvital.as","w") as f:
    with open(syndat_file,"r") as myfile:
        for line in myfile:
            if (re.search(stormHour, line) and re.search(STORM_ID, line)
                    and re.search("NHC", line) and line not in lines_seen):
                lines_seen.add(line)
                f.write(line)

### set up ATCF output file names and the tracker namelist
atcffile=storm_name + storm_id + "." + cycle + ".trak.hmon.atcfunix"
atcffile06=storm_name + storm_id + "." + cycle06 + ".trak.hmon.atcfunix"
atcffile2=STORM_ID + "." + cycle + ".trak.hmon.atcfunix"
produtil.fileop.deliver_file(parmdir + "/namelist.multi.in", "namelist.multi", logger=logger)
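# Link the final ATCF product names to the raw tracker output (written below as
# hmon.<cycle>.trackatcfunix), fill in the namelist template, and build the
# per-forecast-hour inputs expected by the unified tracker.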
make_symlink("hmon." + cycle + ".trackatcfunix", atcffile, True)
make_symlink("hmon." + cycle + ".trackatcfunix", atcffile2, True)

### fill in the tracker namelist template in place
for line in fileinput.FileInput("namelist.multi", inplace=1):
    line = line.replace("_BCC_", CC)
    line = line.replace("_BYY_", YY)
    line = line.replace("_BMM_", MM)
    line = line.replace("_BDD_", DD)
    line = line.replace("_BHH_", HH)
    line = line.replace("_YMDH_", START_DATE)
    line = line.replace("_stormid_", storm_name + storm_id)
    line = line.replace("_FREQ_", "300")
    print(line, end='')

produtil.fileop.remove_file("fcst_minutes", logger=logger)
IFHR=0
FHR=0
while FHR <= FLENGTH:
    DATE = produtil.numerics.to_datetime_rel(FHR*3600, CDATE)
    logger.info("DATE=" + str(DATE))
    fmin = FHR*60
    minstr = "%5.5d" % (fmin)
    ### link grib and index files for tracker from post
    trakfile = "hmon.trak." + storm_name + storm_id + "." + START_DATE + ".f" + minstr
    trakfile_ix = trakfile + ".ix"
    make_symlink("../post/" + trakfile, trakfile, True)
    make_symlink("../post/" + trakfile_ix, trakfile_ix, True)
    ### create fcst_minutes file
    IFHR = IFHR + 1
    MINUTES = FHR * 60
    LINE = "%4d %5d\n" % (IFHR, MINUTES)
    with open("fcst_minutes",'a') as f:
        f.write(LINE)
    FHR = FHR + FINC
    logger.info("link file done, FHR=" + str(FHR))

### prepare the forts for tracker
make_symlink("tcvital.as", "fort.12", True)
with open("fort.14","at"): pass      # create an empty fort.14 for the tracker
make_symlink("fcst_minutes", "fort.15", True)
produtil.fileop.deliver_file("tcvital.as", "tcvit_rsmc_storms.txt")

logger.info("unified tracker started")
produtil.run.run(exe(exedir + "/hmon_unified_tracker") < "namelist.multi", logger=logger)
produtil.fileop.deliver_file("fort.64", "hmon." + START_DATE + ".trackatcfunix")

atmos_d = comdir
produtil.fileop.deliver_file(atcffile, atmos_d + "/" + atcffile, logger=logger)
produtil.fileop.deliver_file(atcffile, atmos_d + "/" + atcffile2, logger=logger)

# The line below makes a DBNAlert object that will alert the DBNet.
# We will reuse this for all later alerts.
alerter=produtil.dbnalert.DBNAlert(
    ['MODEL','{type}','{job}','{location}'])      # arguments to dbn_alert
# These "alerter()" lines send a DBN alert using the alerter object we just created.
alerter(location=atmos_d + "/" + atcffile, type='HMON_ASCII')

### generate other products, e.g. stats.tpc, afos, stats.short
nmmbUsh.rm_glob('fort.*')
make_symlink(workdir + "/get_storm_info/storm_info", ".", True)
if os.path.isfile(comdir + "/" + cycle06 + "/" + stormid + "/" + atcffile06):
    make_symlink(comdir + "/" + cycle06 + "/" + stormid + "/" + atcffile06, "fort.19", True)
make_symlink(atcffile, "fort.20", True)
cmd = exe(exedir + "/hmon_stats_short")
produtil.run.checkrun(cmd > "stats_short.txt", logger=logger)
short = storm_name + storm_id + "." + cycle + ".grib.stats.short"
afos = storm_name + storm_id + "." + cycle + ".afos"
tpc = storm_name + storm_id + "." + cycle + ".stats.tpc"
produtil.fileop.deliver_file("fort.41", atmos_d + "/" + short, logger=logger)
produtil.fileop.deliver_file("fort.51", atmos_d + "/" + afos, logger=logger)
produtil.fileop.deliver_file("fort.61", atmos_d + "/" + tpc, logger=logger)
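# DBN alerts for the text products delivered above, reusing the alerter
# created earlier.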
alerter(location=atmos_d + "/" + short, type='HMON_ASCII')
alerter(location=atmos_d + "/" + afos, type='HMON_ASCII')
alerter(location=atmos_d + "/" + tpc, type='HMON_ASCII')

### generate swath
swath_file="../forecast/swath_d03"
# wait until the forecast job has finished writing the d03 swath file
while not os.path.isfile(swath_file) or os.path.getsize(swath_file) <= 250000000:
    logger.info(swath_file + " not ready, sleep 30")
    time.sleep(30)
nmmbUsh.make_symlinks("../forecast/swath_d02", ".")
nmmbUsh.make_symlinks("../forecast/swath_d03", ".")
cmd = exe(exedir + "/hmon_swath")
produtil.run.checkrun(cmd > "swath.txt", logger=logger)
wind = storm_name + storm_id + "." + cycle + ".sfcwind.asci"
rain = storm_name + storm_id + "." + cycle + ".precip.asci"
produtil.fileop.deliver_file("fort.82", atmos_d + "/" + wind, logger=logger)
produtil.fileop.deliver_file("fort.83", atmos_d + "/" + rain, logger=logger)
produtil.fileop.deliver_file("swath.dat", atmos_d + "/" + storm_name + storm_id + "." + cycle + ".swath.dat", logger=logger)
produtil.fileop.deliver_file("swath.ctl", atmos_d + "/" + storm_name + storm_id + "." + cycle + ".swath.ctl", logger=logger)
# DBN alerts for the swath products, again reusing the alerter created earlier.
alerter(location=atmos_d + "/" + wind, type='HMON_ASCII')
alerter(location=atmos_d + "/" + rain, type='HMON_ASCII')

### append the track to the NCEP ATCF archive for this storm
if STORM_BASIN == 'L':
    storm_basin='al'
elif STORM_BASIN == 'E':
    storm_basin='ep'
elif STORM_BASIN == 'C':
    storm_basin='cp'
produtil.fileop.makedirs(atcfdir + '/' + storm_basin + STORM_NUM + CC + YY, logger=logger)
glatuxarch=(atcfdir + '/' + storm_basin + STORM_NUM + CC + YY
            + '/ncep_a' + storm_basin + STORM_NUM + CC + YY + '.dat')
logger.info("Updating NHC track file " + glatuxarch)
with open(atcffile,'rt') as f:
    out112=''
    for line in f:
        line=line.rstrip()
        out112 += line[0:112] + '\n'
# Append the track in a single operation:
with open(glatuxarch,'at') as o:
    o.write(out112)
# DBN alert for the NCEP ATCF track file.
strmid = storm_basin + STORM_NUM + CC + YY
alerter(location=atmos_d + "/atcf/" + strmid + "/ncep_a" + strmid + ".dat", type='NHC_ATCF_HMON')

logger.info("run_tracker finished")