#! /usr/bin/env python import os, sys, glob, shutil, fileinput, re, time, stat from datetime import datetime from os import path import nmmbUsh import produtil.setup, produtil.run, produtil.fileop, produtil.cd, produtil.batchsystem, produtil.numerics, produtil.config from produtil.fileop import remove_file, make_symlink, chdir, fortlink, deliver_file, makedirs from produtil.run import run, exe, runstr from produtil.numerics import to_datetime, to_datetime_rel import logging from nmmbUsh import logger produtil.setup.setup() logger.info("relocate started") workdir=os.environ.get('WORKhmon') parmdir=os.environ.get('PARMhmon') ushdir=os.environ.get('USHhmon') conf=produtil.config.from_file(parmdir + "/hmon.conf") conf.cycle=os.environ.get('CYCLE') USE_MESSAGE=conf.get('config','use_message') STORMLABEL=conf.get('config','stormlabel') LABEL=STORMLABEL[5] msgdir=conf.get('dir','COMmsg') comdir=conf.get('dir','COM') HISTDATA=conf.get('dir','HISTDATA') fixdir=conf.get('dir','FIXhmon') exedir=conf.get('dir','EXEChmon') INPUT=conf.get('dir','COMgfs') chdir(workdir, logger=logger) ############ GET THE INPUT VARIABLES AND EXPORT ENVIRONMENTAL VARIABLES ############## with open(workdir + "/get_storm_info/storm_info", 'r') as f: for line in f: if "STORM_LAT=" in line: STORM_LAT = line.replace("STORM_LAT=", "") STORM_LAT = float(STORM_LAT) if "STORM_LON=" in line: STORM_LON = line.replace("STORM_LON=", "") STORM_LON = float(STORM_LON) if "CEN_LAT=" in line: CEN_LAT = line.replace("CEN_LAT=", "") CEN_LAT = float(CEN_LAT) if "CEN_LON=" in line: CEN_LON = line.replace("CEN_LON=", "") CEN_LON = float(CEN_LON) if "START_DATE=" in line: cycle = line.replace("START_DATE=", "") cycle = cycle.strip('\n') if "STORM_NAME=" in line: STORM_NAME = line.replace("STORM_NAME=", "") STORM_NAME = STORM_NAME.strip('\n') if "STORM_ID=" in line: storm_id = line.replace("STORM_ID=","") storm_id = storm_id.strip('\n') if "START_DATE06=" in line: cycle_m06= line.replace("START_DATE06=","") cycle_m06= 
cycle_m06.strip('\n') astr = STORM_NAME storm_name = astr.lower() astr = storm_id STORM_ID = astr.lower() n_st=1 cdate=produtil.numerics.to_datetime(cycle) logger.info("n_st = " + str(n_st)) logger.info("cdate = " + str(cdate)) yyyy=cdate.strftime("%Y") cc=cdate.strftime("%C") yy=cdate.strftime("%y") mm=cdate.strftime("%m") dd=cdate.strftime("%d") hh=cdate.strftime("%H") yymmdd=cdate.strftime("%y%m%d") yyyymmdd=cdate.strftime("%Y%m%d") yymmddhh=cdate.strftime("%y%m%d%H") yyyymmddhh=cdate.strftime("%Y%m%d%H") YEAR=yyyy MONTH=mm DATE=dd CYCLE=hh FHR=0 logger.info('YEAR = ' + str(YEAR)) logger.info('MONTH = ' + str(MONTH)) logger.info('DATE = ' + str(DATE)) logger.info('CYCLE = ' + str(CYCLE)) logger.info('storm_id = ' + str(storm_id)) # export environent: # MODEL='NMMB' PARAFLAG='YES' INIT='YES' GSI='NO' GSID01='NO' GSID02='NO' GSID03='NO' innercoremerge='NO' IO_FORM=1 FIND_GFS_VORTEX='NO' FGATSTR=-3 FGATEND=3 FGATINV=3 MODIN='GFS' CASE_ROOT='HISTORY' DATA= workdir +"/reloc/" + yyyymmddhh + "/" + storm_id ATMOS_INPUT = conf.get('dir','COMgfso') atmos_d=comdir gfstrack="trak.gfso.atcfunix."+yyyymmddhh if USE_MESSAGE=='YES': SYNDAT_CORRECTED= msgdir + "/message" + LABEL else: SYNDAT_CORRECTED= conf.get('dir','syndat') + "/syndat_tcvitals." 
+ yyyy logger.info('SYNDAT_CORRECTED='+str(SYNDAT_CORRECTED)) ############### Run Tracker for GFS data ################################################################ gfstrackdir = workdir + "/gfstrack" nmmbUsh.removeall(gfstrackdir) makedirs(gfstrackdir) chdir(gfstrackdir) stormHour = yy + mm + dd + " " + hh lines_seen = set() with open("vitals.gfso","w") as f: with open(SYNDAT_CORRECTED,"r") as myfile: for line in myfile.readlines(): if re.search(stormHour, line) and re.search(storm_id, line) and re.search("NHC", line) and re.search(STORM_NAME, line): if line not in lines_seen: lines_seen.add(line) f.write(line) if USE_MESSAGE == 'YES': for line in fileinput.FileInput("vitals.gfso",inplace=1): line = line.replace(yymmdd,yyyymmdd) print(line, end=' ') produtil.fileop.deliver_file(parmdir + "/namelist.gfs.in", "namelist.gfs",logger=logger) for line in fileinput.FileInput("namelist.gfs",inplace=1): line = line.replace("_BCC_",cc) line = line.replace("_BYY_",yy) line = line.replace("_BMM_",mm) line = line.replace("_BDD_",dd) line = line.replace("_BHH_",hh) line = line.replace("_YMDH_",cycle) print(line, end=' ') FHR = 0 FHR3 = "%03d" %(FHR) gfsfile = "gfs.t" + hh + "z.pgrb2.0p25.f" + FHR3 FileName= "../nps_ic/GRIBFILE.AAA" make_symlink(FileName, gfsfile, True) wgrib_s=runstr(exe(os.environ["WGRIB2"])['-s', gfsfile]) keep='' for line in wgrib_s.splitlines(): if re.search('(:HGT:850 mb:|:HGT:700 mb:|:UGRD:850 mb:|:UGRD:700 mb:|:UGRD:500 mb:|:VGRD:850 mb:|:VGRD:700 mb:|:VGRD:500 mb:|:ABSV:850 mb:|:ABSV:700 mb:|:MSLET:mean sea level:|:UGRD:10 m above ground:|:VGRD:10 m above ground:)',line): keep+=line+'\n' gfile="gfsgribfile." + yyyy + mm + dd + hh ixfile="gfsixfile." 
+ yyyy + mm + dd + hh produtil.run.checkrun(exe(os.environ["WGRIB2"])['-i', gfsfile, '-grib', gfile] << keep) gbi=os.environ.get("GRB2INDEX") cmd=exe(gbi)[gfile,ixfile] produtil.run.run(cmd, logger=logger) IFHR = 1 MINUTES = FHR * 60 LINE= "%4d %5d" % (IFHR, MINUTES) LINE = LINE + "\n" with open("fcst_minutes",'a') as f: f.write(LINE) with open(gfstrack,"at"):pass make_symlink(gfile, "fort.11", True) make_symlink(ixfile, "fort.31", True) make_symlink("vitals.gfso", "fort.12", True) with open("fort.14","at"):pass make_symlink("fcst_minutes", "fort.15", True) make_symlink(gfstrack, "fort.64", True) produtil.run.run(exe(exedir + "/hmon_unified_tracker")<"namelist.gfs", logger=logger) if os.stat(gfstrack).st_size==0: logger.info("NO GFS storms") produtil.fileop.deliver_file(atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_01_nemsio_nps",atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_01_nemsio",logger=logger) produtil.fileop.deliver_file(atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_02_nemsio_nps",atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_02_nemsio",logger=logger) produtil.fileop.deliver_file(atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_03_nemsio_nps",atmos_d+"/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_03_nemsio",logger=logger) sys.exit(0) ######################################################################################################### data_tmp = comdir datadir = DATA nmmbUsh.removeall(DATA) makedirs(DATA) chdir(DATA) ############################################################################## now=datetime.utcnow() logger.info(now.strftime("%Y-%m-%d %H:%M")) INIT='YES' if INIT == 'NO': sys.exit("Initialization not requested. 
INIT = NO This script need not be run") logger.info("VORTEX RELOCATION BEGINS") storm_n=str(storm_id)[0:2] basin=str(storm_id)[2:3] if basin == 'L' or basin == 'l': basin='AL' elif basin == 'E' or basin == 'e' : basin='EP' else: logger.info("add basin parameters") logger.info('basin=' + str(basin)) n_st2 = n_st * 2 n_st3 = n_st2 + 1 logger.info("n_st2 = " + str(n_st2)) logger.info("n_st3 = " + str(n_st3)) logger.info("Runs the hmon_diffwrf_3dvar program to update the output domains.") stormHour = yy + mm + dd + " " + hh with open("tmpvit","w") as f: with open(SYNDAT_CORRECTED,"r") as myfile: for line in myfile.readlines(): if re.search(stormHour, line) and re.search(storm_id, line) and re.search("NHC", line) and re.search(STORM_NAME, line): f.write(line) if USE_MESSAGE == 'YES': for line in fileinput.FileInput("tmpvit",inplace=1): line = line.replace(yymmdd,yyyymmdd) print(line, end=' ') numvit = sum(1 for line in open(path.join(DATA,"tmpvit"))) if numvit == 0: logger.info("!!! ERROR: No vitals for storm_id = ---> $storm_id <---\n !!! 
and yymmddhh = ---> ${PDY}${cyc} <---\n was found in the tcvitals archive....") sys.exit(98) else: logger.info("Vitals record for requested run follows....") with open(DATA+"/tmpvit", 'r') as fin: logger.info(fin.read()) logger.info(DATA) deliver_file(path.join(DATA,"tmpvit"), path.join(datadir,"message1"),logger=logger) with open(datadir + "/nestnum",'w+') as f: f.write("3") with open(datadir + "/nstorms",'w+') as f: f.write("1") with open(datadir + "/stormdate",'w+') as f: f.write(yymmddhh) logger.info(os.getcwd()) deliver_file(path.join(DATA,"tmpvit"), "./tcvitals.as",logger=logger) with open("tmpvit", 'r') as fin: logger.info(fin.read()) with open("tcvitals.as", 'r') as f: name = f.readline().split() STORM_NAME = name[2] with open("./tcvitals.as", 'r') as fin: STORM_INT = str(fin.read())[67:69] with open("./tcvitals.as", 'r') as fin: STORM_DEPTH = str(fin.read())[94:95] logger.info('STORM_NAME='+str(STORM_NAME)) logger.info('STORM_INT='+str(STORM_INT)) logger.info('STORM_DEPTH='+str(STORM_DEPTH)) stormid=storm_id storm_name=STORM_NAME.lower() STORM_ID=storm_id.lower() # CENLA and CENLO are the parent domain center lat/lon (in degrees) CENLA=CEN_LAT CENLO=CEN_LON with open(datadir+"/domain.center","w+") as f: f.write(str(CENLA)+"\n"+str(CENLO)) deliver_file(datadir+"/domain.center", path.join(DATA, storm_id+"." + yyyy + mm + dd + CYCLE+".domain.center"),logger=logger) storm=STORM_NAME CLAT=CENLA CLON=CENLO ############### vortex initialization options ################## initopt=0 ############### vortex initialization options ################## ##################### cold start TC vortex ##################### if MODIN == "GFS": coldvortex="GFS" else: if GSID02 == "YES" or GSID03 == "YES": coldvortex="GDAS" else: coldvortex="HDAS" ##################### cold start TC vortex ##################### ################## relocation success flags #################### # The script must set these before exiting to indicate a warm or cold # start, or to veto cycling. 
# $warm_cold_flag: set to "warm" for a warm start or "cold" for a cold # start. All other values are invalid. warm_cold_flag="unknown" # $cold_ok: set to "yes" to intentionally veto cycling (such as for a # weak storm). Set to "no" if this is a cold start due to missing or # erroneous data (such as a missing wrfout file or an incomplete # track). cold_ok="no" ################## relocation success flags #################### # Control settings if FHR > 0: FGATH = FHR - 6 gesfhr_fgat = FHR else: FGATH = 0 gesfhr_fgat = 6 gdate=produtil.numerics.to_datetime_rel(6*-3600, yyyymmddhh) gm=gdate.strftime("%m") gd=gdate.strftime("%d") gh=gdate.strftime("%H") fdate=produtil.numerics.to_datetime_rel(FGATH*3600, yyyymmddhh) yyyy=fdate.strftime("%Y") yy=fdate.strftime("%y") im=fdate.strftime("%m") id=fdate.strftime("%d") ih=fdate.strftime("%H") edate=produtil.numerics.to_datetime_rel(FGATEND*3600, yyyymmddhh) ime=edate.strftime("%m") ide=edate.strftime("%d") ihe=edate.strftime("%H") domdir=datadir VIT=path.join(DATA,"tmpvit") logger.info(DATA) logger.info(VIT) logger.info("yyyy = " + str(yyyy)) logger.info("yy = " + str(yy)) logger.info("im = " + str(im)) logger.info("ih = " + str(ih)) # Set paths for runtime and save directories if FHR > 0: tmpdir = path.join(DATA, "3DVAR/nmmb_rel", storm+ "." + fdate) savdir = path.join(DATA, storm + "."+ yyyymmddhh, "gdas1.f"+fdate) else: tmpdir = path.join(DATA, "hwrf_rel", storm + "." + yyyymmddhh) savdir = path.join(DATA, storm + "." + yyyymmddhh, "ATMOS") vitdir=ATMOS_INPUT if not(path.isdir(savdir)): makedirs(savdir) # Set up directories nmmbUsh.removeall(tmpdir) makedirs(tmpdir) chdir(tmpdir) #find corresponding stormid from previous cycle: prev_date=produtil.numerics.to_datetime_rel(6*-3600, yyyymmddhh) prev_stormid=stormid if PARAFLAG == "YES": k = HISTDATA.rfind(".") HISTDATA_base = HISTDATA[:k] #Specify fixed field and data directories. testf_d1=path.join(HISTDATA,storm_id + "." + cycle_m06 + "." 
+ "nmmb_hst_01_nio_0006h_00m_00.00s") # 6h earlier forecast testf_d2=path.join(HISTDATA,storm_id + "." + cycle_m06 + "." + "nmmb_hst_0" + str(n_st2) + "_nio_0006h_00m_00.00s") # 6h earlier forecast testf_d3=path.join(HISTDATA,storm_id + "." + cycle_m06 + "." + "nmmb_hst_0" + str(n_st3) + "_nio_0006h_00m_00.00s") # 6h earlier forecast if PARAFLAG == "YES": wait_time=5400 # wait 1.5 hours due to queue wait times elif not(path.isfile(testf_d1)): # cold start waitting time wait_time=2400 else: # warm start waitting time wait_time=1200 # check if the HWRF forecast at the end of the FGAT window is available tmp1 = FGATSTR + 6 tmp2 = FGATEND + 6 gesstr_fgat = FGATSTR + 6 gesend_fgat = FGATEND + 6 #teste_d1=testf_d1 if IO_FORM != 2 and GSID02 == "YES": deliver_file(path.join(savdir,"/log-anl/namelist.input"), "./namelist_analysis0.input",logger=logger) deliver_file("./namelist_analysis0.input", "./namelist.input",logger=logger) iflag_cold=0 if not ((path.isfile(testf_d1) and path.getsize(testf_d1)>0)) : iflag_cold=1 logger.info("Previous " + str(gesfhr_fgat) + " hour forecast is not located at " + str(testf_d1)) logger.info("This is a cold start. " + str(coldvortex) + " vortex will be used as first guess") logger.info("This script will contnue from step 2 onwards") warm_cold_flag="cold" elif FGATEND > FGATSTR and not(path.isfile(testf_d1) and path.getsize(testf_d1)>0) : iflag_cold=1 logger.info("Previous " + str(gesend_fgat) + " hour forecast is not located at " + str(teste_d1)) logger.info("Previous " + str(gesstr_fgat) + " to " + str(gesend_fgat) + " hours forecast") logger.info("with " + str(FGATINV) + " hourly output are needed for FGAT") logger.info("This is a cold start due to HWRF forecast storm being weak") logger.info("This is a cold start. 
" + str(coldvortex) + " vortex will be used as first guess") logger.info("This script will contnue from step 2 onwards") warm_cold_flag="cold" cold_ok="yes" else: # script will run to end of script st_int = STORM_INT st_depth = STORM_DEPTH logger.info("relocate line 469") logger.info("st_int=" + str(st_int)) logger.info("st_depth=" + str(st_depth)) if st_depth == 'S' and int(st_int) < 14: iflag_cold=1 logger.info("Previous " + str(gesfhr_fgat) + " hour forecast is located at " + testf_d1) logger.info("storm is a shallow and weak storm") logger.info(str(st_int)) logger.info(str(st_depth)) logger.info("This is a cold start. " + str(coldvortex) + " vortex will be used as first guess") logger.info("This script will contnue from step 2 onwards") warm_cold_flag="cold" cold_ok="yes" elif int(st_int) < 14: iflag_cold=1 logger.info( "Previous " + str(gesfhr_fgat) + " hour forecast is located at " + testf_d1) logger.info("storm is a weak storm, max wind speed < 14 m/s") logger.info(str(st_int)) logger.info(str(st_depth)) logger.info("This is a cold start" + str(coldvortex) + " vortex will be used as first guess") logger.info("This script will contnue from step 2 onwards") warm_cold_flag="cold" cold_ok="yes" else: logger.info( "Previous " + str(gesfhr_fgat) + " hour forecast is located at " + testf_d1) logger.info("This is not a cold start") logger.info("The script nmmb_relocate.sh will continue") warm_cold_flag="warm" # Copy executables to $tmpdir deliver_file(path.join(exedir,"hmon_diffwrf_3dvar"), "./hmon_diffwrf_3dvar.exe",logger=logger) deliver_file(path.join(exedir,"hmon_nemsio"), "./nems_bin_io.exe",logger=logger) deliver_file(path.join(exedir,"hmon_merge_nest_4x_step12_3n"),"./merge_nest_4x_step12_3n.x",logger=logger) deliver_file(path.join(exedir,"hmon_split1"), "./wrf_split.x",logger=logger) deliver_file(path.join(exedir,"hmon_create_trak_guess"), "./create_trak_guess.x",logger=logger) deliver_file(path.join(exedir,"hmon_pert_ct1"), "./hmon_pert_ct.x",logger=logger) 
deliver_file(VIT, "./tmpvit",logger=logger) with open ("./tmpvit", "r") as myfile: for line in myfile.readlines(): if re.search(storm_id, line, re.IGNORECASE): f = open("./tcvitals.as", 'w') f.write(line) f.close() if not (path.isfile("tcvitals.as") and path.getsize("tcvitals.as")>0) : logger.info("tcvitals file is missing - check the path names and the status of GFS forecast run") st_int=STORM_INT logger.info("relocate line 521") logger.info("st_int=" + str(st_int)) deliver_file(testf_d1, "./wrfout_d01",logger=logger) deliver_file(testf_d2, "./wrfout_d02",logger=logger) if path.isfile(testf_d3): deliver_file(testf_d3,"./wrfout_d03",logger=logger) if IO_FORM == 2: nmmbUsh.relocate_storm(logger, 1) else: nmmbUsh.nems_relocate(logger, 4, "wrfout", "old_hwrf", 1) if IO_FORM == 2: nmmbUsh.relocate_storm(logger, 2) if path.isfile(testf_d3): nmmbUsh.relocate_storm(logger, 3) else: nmmbUsh.nems_relocate(logger, 5, "wrfout", "old_hwrf", 2) if path.isfile(testf_d3): nmmbUsh.nems_relocate(logger, 6,"wrfout", "old_hwrf", 3) # correct data, regenerate inner nest data,test only remove_file(path.join(tmpdir,"roughness")) remove_file(path.join(tmpdir,"roughness1")) remove_file(path.join(tmpdir,"storm_pert_new")) nmmbUsh.delete_file("fort.*") produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"), 26: path.join(tmpdir, "old_hwrf_d01"), 36: path.join(tmpdir, "old_hwrf_d02"), 46: path.join(tmpdir, "old_hwrf_d03"), 56: path.join(tmpdir, "data_4x_hwrf"), 66: path.join(tmpdir, "roughness1"), 61: path.join(tmpdir, "30_degree_data")},logger=logger,force=True) gesfhr=6 ibgs=0 nmmbUsh.hmon_pgm(logger, path.join(tmpdir,"merge_nest_4x_step12_3n.x"), 6, st_int, ibgs, CLAT, CLON, MODEL) nmmbUsh.delete_file("fort.*") if int(st_int) > 10: deliver_file(HISTDATA + "/" + storm_id + "." + yyyy + gm + gd + gh +".trak.hmon.atcfunix" , path.join(tmpdir, "hdas_atcfunix"),logger=logger) else: deliver_file(HISTDATA + "/" + storm_id + "." 
+ yyyy + gm + gd + gh +".trak.hmon.atcfunix" , path.join(tmpdir, "hdas_atcfunix"),logger=logger) produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as")}, logger=logger, force=True) if stormid == prev_stormid: produtil.fileop.fortlink({12: path.join(tmpdir, "hdas_atcfunix")}, logger=logger, force=True) else: storm_idnum=str(stormid)[0:2] prev_storm_idnum=str(prev_stormid)[0:2] deliver_file(path.join(tmpdir, "hdas_atcfunix"), "fort.12",logger=logger) for line in fileinput.FileInput(f, inplace=1): line = line.replace(basin+", "+prev_storm_idnum, basin+", "+storm_idnum) print(line, end=' ') make_symlink(path.join(tmpdir,"trak.fnl.all"), "fort.30", True) produtil.fileop.fortlink({30: path.join(tmpdir,"trak.fnl.all")}, logger=logger, force=True) if FGATEND > FGATSTR: fhr=3 while fhr < gesend_fgat: fhr00="%03d" % (fhr) with open (path.join(tmpdir, "hdas_atcfunix"), "r") as myfile: gesfhrx = 0 for line in myfile.readlines(): if re.search("HMON, " + fhr00, line): gesfhrx = gesfhrx +1 if gesfhrx < 1: fhr=999 else: fhr=fhr + FGATINV else: fhr00="%03d" % (gesfhr_fgat) with open (path.join(tmpdir , "hdas_atcfunix"), "r") as myfile: gesfhrx = 0 for line in myfile.readlines(): if re.search("HMON, " + fhr00, line): gesfhrx = gesfhrx +1 if gesfhrx < 1: logger.info("There is no " + str(fhr00) + " Forecast Position from previous cycle") logger.info("create_trak_guess.x can not continue with relocation") logger.info("This is a cold start") warm_cold_flag="cold" cold_ok="yes" else: nmmbUsh.hmon_pgm(logger, path.join(tmpdir,"create_trak_guess.x"), storm_id, str(hh)) nmmbUsh.delete_file("fort.*") produtil.fileop.fortlink({11: path.join(tmpdir,"tcvitals.as"), 26: path.join(tmpdir,"data_4x_hwrf"), 30: path.join(tmpdir, "trak.fnl.all"), 46: path.join(tmpdir,"old_hwrf_d01"), 56: path.join(tmpdir,"wrf_env"), 52: path.join(tmpdir,"rel_inform." + yyyymmddhh), 55: path.join(tmpdir,"vital_syn." 
+ yyyymmddhh), 71: path.join(tmpdir,"storm_pert"), 85: path.join(tmpdir,"storm_radius")}, logger=logger, force=True) nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "wrf_split.x"), str(gesfhr_fgat), str(ibgs), str(st_int), str(iflag_cold)) nmmbUsh.delete_file("fort.*") produtil.fileop.fortlink({11: path.join(tmpdir,"tcvitals.as"), 26: path.join(tmpdir,"wrf_env"), 46: path.join(tmpdir,"roughness1"), 71: path.join(tmpdir,"storm_pert"), 58: path.join(tmpdir,"storm_pert_new"), 14: path.join(tmpdir,"storm_size_p"), 23: path.join(tmpdir,"storm_sym"), 65: path.join(tmpdir,"storm_radius"), 35: path.join(tmpdir,"storm_pert_step1_1")}, logger=logger, force=True) nmmbUsh.hmon_pgm(logger, path.join(tmpdir, 'hmon_pert_ct.x'), gesfhr) nmmbUsh.delete_file("fort.*") logger.info("continue bogussing the storm in step2. This is not a cold start") #step2 if CASE_ROOT == "FORECAST": # check dependencies if GSID01 == "YES" and GSID02 == "NO" and GSID03 == "NO": WAIT_INFILE=DATA+ "/3DVAR/outreg/nmm_netcdf/gsi_cvs1." + storm + "." + yyyymmddhh + "/wrfanl." 
+ yyyymmddhh minsize=100000000 else: WAIT_INFILE=savdir + "/ghost_d03_0000-00-00_00:00:00" minsize=1000000000 if produtil.fileop.wait_for_files([WAIT_INFILE],logger,maxwait=wait_time): sys.exit("timed out waiting for " + WAIT_INFILE) sys.exit("2") if not (path.isdir(tmpdir)): makedirs(tmpdir) chdir(tmpdir) # Copy the fixed file to $tmpdir deliver_file(path.join(exedir,"hmon_pert_ct1"), "./hmon_pert_ct.x",logger=logger) # Copy executable files to $tmpdir deliver_file(path.join(exedir,"hmon_diffwrf_3dvar"), "./hmon_diffwrf_3dvar.exe",logger=logger) deliver_file(path.join(exedir,"hmon_nemsio"), "./nems_bin_io.exe",logger=logger) deliver_file(path.join(exedir,"hmon_split1"), "./wrf_split.x",logger=logger) deliver_file(path.join(exedir,"hmon_merge_nest_4x_step12_3n"), "./merge_nest_4x_step12_3n.x",logger=logger) deliver_file(path.join(exedir,"hmon_anl_bogus_10m"), "./hmon_anl_bogus_10m.x",logger=logger) deliver_file(path.join(exedir,"hmon_anl_cs_10m"), "./hmon_anl_cs_10m.x",logger=logger) deliver_file(path.join(exedir,"hmon_anl_4x_step2"), "./hmon_anl_4x_step2.x",logger=logger) deliver_file(path.join(exedir,"hmon_inter_2to2"), "./hmon_inter_2to2.x",logger=logger) deliver_file(path.join(exedir,"hmon_inter_4to6"), "./hmon_inter_4to6.x",logger=logger) deliver_file(path.join(exedir,"hmon_create_trak_fnl"), "./create_trak_fnl.x",logger=logger) if MODEL == "HWRF": # Copy wrfnmm restart files if GSID01 == "YES" and GSID02 == "NO" and GSID03 == "NO": infile_d01=DATA + "/3DVAR/outreg/nmm_netcdf/gsi_cvs1." + storm + "." + yyyymmddhh + "/wrfanl." 
+ yyyymmddhh else: infile_d01=savdir + "/wrfinput_d01" for waitfile in [infile_d01, savdir+"/wrfanl_d02_" + yyyy + "-" + im + "-" + id + "_" + ih + ":00:00", savdir + "/wrfanl_d03_" + yyyy + "-" + im + "-" + id + "_" + ih + ":00:00"] : if produtil.fileop.wait_for_files([waitfile],logger,maxwait=wait_time): logger.info("timed out waiting for " + str(waitfile)) sys.exit(2) if GSID02 == "YES": waitfile=savdir+"/ghost_d02_0000-00-00_00:00:00" if produtil.fileop.wait_for_files([waitfile],logger,maxwait=wait_time): logger.info("timed out waiting for " + str(waitfile)) sys.exit(2) if GSID03 == "YES": waitfile=savdir+"/ghost_d03_0000-00-00_00:00:00" if produtil.fileop.wait_for_files([waitfile],logger,maxwait=wait_time): logger.info("timed out waiting for " + str(waitfile)) sys.exit(2) deliver_file(infile_d01, "./wrfinput_d01",logger=logger) deliver_file(savdir+"/wrfanl_d02_" + yyyy + "-" + im + "-" + id + "_" + ih + ":00:00", "./wrfinput_d02",logger=logger) deliver_file(savdir+"/wrfanl_d03_" + yyyy + "-" + im + "-" + id + "_" + ih + ":00:00", "./wrfinput_d03",logger=logger) testf1_d1=data_tmp + "/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_01_nemsio_nps" # GFS analysis testf1_d2=data_tmp + "/" + storm_name + STORM_ID + "." + cycle + "." + "input_domain_02_nemsio_nps" # GFS analysis testf1_d3=data_tmp + "/" + storm_name + STORM_ID + "." + cycle + "." 
+ "input_domain_03_nemsio_nps" # GFS analysis deliver_file(testf1_d1, "./wrfinput_d01",logger=logger) deliver_file(testf1_d2, "./wrfinput_d02",logger=logger) deliver_file(testf1_d3, "./wrfinput_d03",logger=logger) if GSID02 == "YES": deliver_file(path.join(savdir,"/ghost_d02_0000-00-00_00:00:00"), "./wrfghost_d02",logger=logger) if GSID03 == "YES": deliver_file(path.join(savdir,"/ghost_d03_0000-00-00_00:00:00"), "./wrfghost_d03",logger=logger) deliver_file(VIT, "./tmpvit",logger=logger) with open ("./tmpvit", "r") as myfile: for line in myfile.readlines(): if re.search(storm_id, line, re.IGNORECASE): deliver_file("./tmpvit", "./tcvitals.as",logger=logger) #with open ("tcvitals.as", "r") as myfile: # for line in myfile.readlines(): # st_int = str(line)[67:69] with open("tcvitals.as", 'r') as myfile: st_int = str(myfile.read())[67:69] logger.info("relocate line 731") logger.info("st_int=" + str(st_int)) domain_test=domdir+"/domain.center" if path.isfile(domain_test): deliver_file(domdir+"/domain.center" , tmpdir + "/domain.center",logger=logger) else: logger.info("Domain Center at " + str(domain_test) + " does not exist") logger.info("Check the directory " + str(domdir)+ " = provide domain.center") looger.info("And run this script again. 
Now exiting") err=911 sys.exit("Domain Center at " + domain + "_test does not exist") if IO_FORM == 2: nmmbUsh.relocate_storm(logger,1) else: nmmbUsh.nems_relocate(logger, 1,"wrfinput", "new_gfs", 1) if IO_FORM == 2: nmmbUsh.relocate_storm(logger, 2) nmmbUsh.relocate_storm(logger, 3) else: nmmbUsh.nems_relocate(logger, 2,"wrfinput", "new_gfs", 2) nmmbUsh.nems_relocate(logger, 3,"wrfinput", "new_gfs", 3) if IO_FORM != 2 and GSID02 == 'YES': nmmbUsh.deliver_file(savdir+"/log-ghost/namelist.input", "./namelist_ghost.input",logger=logger) if GSID02 == "YES": if IO_FORM == 2: nmmbUsh.relocate_storm(logger, 2) #run ./hmon_diffwrf_3dvar.exe" for d02 else: deliver_file("./namelist_ghost.input", "./namelist.input",logger=logger) with open("itag3",'w'): f.write(tmpdir="\n") f.write("wrfghost_d02\n") f.write("relocate\n") f.write(path.join(tmpdir,"new_ght_d02\n")) f.write(path.join(tmpdir,"domain.center\n")) nmmbUsh.nems_relocate(logger, 3, "wrfghost", "relocate", 2) if GSID03 == "YES": if IO_FORM == 2: nmmbUsh.relocate_storm(logger, 3) #run ./hmon_diffwrf_3dvar.exe" for d03 else: deliver_file("./namelist_ghost.input", "./namelist.input",logger=logger) with open("itag3",'w'): f.write(tmpdir="\n") f.write("wrfghost_d03\n") f.write("relocate\n") f.write(path.join(tmpdir,"new_ght_d03\n")) f.write(path.join(tmpdir,"domain.center\n")) nmmbUsh.nems_relocate(logger, 3, "wrfghost", "relocate", 3) # correct data, regenerate inner nest data nmmbUsh.delete_file("fort.*") if FIND_GFS_VORTEX != "NO": # Use HWRF scripts to find the GFS track, done in hwrf_gfs_track0 job: track0file=savdir = "/gfs-anl.atcfunix" # Wait for the file to exist. In NCO mode, the file already should exist # since the job is run in parallel with the dummy job, but takes less # time. Just in case, we wait up to 5 minutes here in NCO mode. # In non-NCO mode, we wait a half hour to allow jobs to run out of order. 
naptime=1800 if not produtil.fileop.wait_for_files([track0file],logger,maxwait=naptime): # The file did not exist. if PARAFLAG == "YES": sys.exit("timed out waiting for ", track0file) else: logger.info("Time out waiting for" + str(track0file) + ":will use tcvitals for the vortex relocation instead.") cmd=exe("./atcfunix") # create an empty track file produtil.run.checkrun(cmd, logger=logger) else: if not (path.isfile("tcvitals.as") and path.getsize("tcvitals.as")>0) : logger.info("Track file is empty; will use tcvitals instead: " + str(track0file)) deliver_file(track0file, "./atcfunix",logger=logger) else: # Use GFS track file #nmmbUsh.grep_file(yyyymmddhh, ATMOS_INPUT+"/gfso.t"+CYCLE+"z.cyclone.trackatcfunix", "./tmp11", re.IGNORECASE) nmmbUsh.grep_file(yyyymmddhh, gfstrackdir+"/trak.gfso.atcfunix."+yyyymmddhh, "./tmp11", re.IGNORECASE) nmmbUsh.grep_file(basin, "./tmp11", "./tmp12", re.IGNORECASE) nmmbUsh.grep_file("GFSO|PRE1|PRD1", "./tmp12", "./atcfunix", re.IGNORECASE) produtil.fileop.fortlink({11: path.join(tmpdir,"tcvitals.as"), 12: path.join(tmpdir,"atcfunix"), 30: path.join(tmpdir,"trak.fnl.all_gfs")}, logger=logger, force=True) # run the executable create_trak_fnl.x nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "create_trak_fnl.x"), storm_id, yyyy, basin) remove_file(path.join(tmpdir,"roughness")) remove_file(path.join(tmpdir,"roughness2")) nmmbUsh.delete_file("fort*") produtil.fileop.fortlink({11: path.join(tmpdir,"tcvitals.as"), 26: path.join(tmpdir,"new_gfs_d01"), 36: path.join(tmpdir,"new_gfs_d02"), 46: path.join(tmpdir,"new_gfs_d03"), 56: path.join(tmpdir,"data_4x_gfs"), 66: path.join(tmpdir,"roughness2")}, logger=logger, force=True) gesfhr=6 ibgs=1 vobs=0 nmmbUsh.hmon_pgm(logger, path.join(tmpdir,"merge_nest_4x_step12_3n.x"), gesfhr, vobs, ibgs , CLAT , CLON , MODEL ) nmmbUsh.delete_file("fort.*") produtil.fileop.fortlink({11: path.join(tmpdir,"tcvitals.as"), 26: path.join(tmpdir,"data_4x_gfs"), 30: path.join(tmpdir,"trak.fnl.all_gfs"), 46: 
path.join(tmpdir,"new_gfs_d01"), 56: path.join(tmpdir,"gfs_env"), 52: path.join(tmpdir,"rel_inform_gfs." + yyyymmddhh), 55: path.join(tmpdir,"vital_syn_gfs." + yyyymmddhh), 71: path.join(tmpdir,"storm_pert_gfs"), 65: path.join(tmpdir,"storm_radius"), 85: path.join(tmpdir,"storm_radius_gfs")}, logger=logger, force=True) gesfhr=6 ibgs=2 #check HWRF vortex testf_d1=tmpdir+"/storm_pert_new" if path.isfile(testf_d1): ibgs=1 # run the executable wrf_split.x logger.info("gesfhr_fgat=" + str(gesfhr_fgat)) logger.info("ibgs=" + str(ibgs)) logger.info("st_int=" + str(st_int)) logger.info("iflag_cold=" + str(iflag_cold)) nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "wrf_split.x"), str(gesfhr_fgat), str(ibgs), str(st_int), str(iflag_cold)) gfs_flag=6 if path.isfile(testf_d1): logger.info(" This is not a cold start. The " + str(gesfhr_fgat) + "-h HWRF vortex will be used") gfs_flag=6 warm_cold_flag="warm" hwrf_vortex="yes" else: logger.info(" This is cold start. The " + str(gesfhr_fgat) + "-h HWRF vortex is absent") warm_cold_flag="cold" hwrf_vortex="no" if int(st_int) < 20: logger.info(" This is a cold start. 
# ------------------------------------------------------------------
# Vortex adjustment stage: fall back to the GFS vortex, locate the
# GFS track, then warm-start (storm_pert_new present) or cold-start
# (bogus vortex) the relocation executables.
#
# NOTE(review): this section was recovered from a line-collapsed
# paste. Statement order is preserved; the if/else nesting was
# inferred from the messages and from the parallel HWRF relocate
# scripts and should be confirmed against the original source.
# The opening of the next logger.info(...) call sits on the previous
# (unseen) line of the file; it is completed here — reconcile when
# merging.
logger.info("The " + str(coldvortex) + " vortex will be used")
gfs_flag = 0
nmmbUsh.delete_file("fort.*")
# correct data, regenerate inner nest data, test only: use the GFS
# vortex fields as the storm perturbation inputs.
deliver_file(path.join(tmpdir, "storm_pert_gfs"),
             path.join(tmpdir, "storm_pert"), logger=logger)
deliver_file(path.join(tmpdir, "storm_radius_gfs"),
             path.join(tmpdir, "storm_radius"), logger=logger)
deliver_file(path.join(tmpdir, "atcfunix"),
             path.join(tmpdir, "hdas_atcfunix"), logger=logger)
deliver_file(path.join(tmpdir, "roughness2"),
             path.join(tmpdir, "roughness1"), logger=logger)
produtil.fileop.fortlink({12: path.join(tmpdir, "atcfunix"),
                          11: path.join(tmpdir, "tcvitals.as"),
                          26: path.join(tmpdir, "gfs_env"),
                          71: path.join(tmpdir, "storm_pert"),
                          58: path.join(tmpdir, "storm_pert_new"),
                          14: path.join(tmpdir, "storm_size_p"),
                          23: path.join(tmpdir, "storm_sym"),
                          46: path.join(tmpdir, "roughness1"),
                          65: path.join(tmpdir, "storm_radius"),
                          35: path.join(tmpdir, "storm_pert_step1_1")},
                         logger=logger, force=True)
# run executable hmon_pert_ct.x (guess-hour argument fixed at 6)
## sh ${utilscript}/setup.sh
nmmbUsh.hmon_pgm(logger, path.join(tmpdir, 'hmon_pert_ct.x'), 6)
nmmbUsh.delete_file("fort.*")

if FIND_GFS_VORTEX != 'NO':
    # Use HWRF scripts to find the GFS track, done in hwrf_gfs_track0 job.
    # BUGFIX: was the un-ported shell literal "$savdir/gfs-anl.atcfunix",
    # so every later isfile()/exit-message use of track0file was wrong.
    track0file = path.join(savdir, "gfs-anl.atcfunix")
    # Wait for the file to exist. In NCO mode, the file already should exist
    # since the job is run in parallel with the dummy job, but takes less
    # time. Just in case, we wait up to 5 minutes here in NCO mode.
    # In non-NCO mode, we wait a half hour to allow jobs to run out of order.
    # NOTE(review): naptime is a fixed 3000 s (50 min) regardless of mode,
    # which does not match the comment above — confirm intended values.
    naptime = 3000
    waitfile = path.join(savdir, "gfs-anl.atcfunix")
    if not produtil.fileop.wait_for_files([waitfile], logger, maxwait=naptime):
        # The file did not exist.
        if PARAFLAG == "YES":
            sys.exit("timed out waiting for " + track0file)
        else:
            logger.info("Timed out waiting for " + str(track0file) +
                        ": will use tcvitals for the vortex relocation instead.")
            # BUGFIX: close the handle instead of leaking it. Creating an
            # empty ./atcfunix triggers the tcvitals fallback downstream.
            with open("./atcfunix", 'w'):
                pass
    else:
        # BUGFIX: os.path has no size(); path.size() raised AttributeError.
        if not (path.isfile(track0file) and path.getsize(track0file) > 0):
            # The track file exists but is empty. That means we could not
            # find the GFS vortex. Delivering the empty file makes the
            # downstream code fall back to tcvitals.
            logger.info("Track file is empty; will use tcvitals instead: " +
                        str(track0file))
            deliver_file(track0file, "./atcfunix", logger=logger)
        else:
            # Use GFS track file
            #nmmbUsh.grep_file(yyyymmddhh, ATMOS_INPUT+"/gfso.t"+CYCLE+"z.cyclone.trackatcfunix", "./tmp11", re.IGNORECASE)
            nmmbUsh.grep_file(yyyymmddhh,
                              gfstrackdir + "/trak.gfso.atcfunix." + yyyymmddhh,
                              "./tmp11", re.IGNORECASE)
            nmmbUsh.grep_file(basin, "./tmp11", "./tmp12", re.IGNORECASE)
            nmmbUsh.grep_file("GFSO|PRE1|PRD1", "./tmp12", "./atcfunix",
                              re.IGNORECASE)
    produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                              12: path.join(tmpdir, "atcfunix"),
                              30: path.join(tmpdir, "trak.fnl.all_gfs_06")},
                             logger=logger, force=True)
    # run the executable create_trak_fnl.x
    nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "create_trak_fnl.x"),
                     storm_id, yyyy, basin)

# Warm start if a previous vortex perturbation exists; otherwise cold
# start with a bogus vortex (else-branch below).
testf_d1 = path.join(tmpdir, "storm_pert_new")
if path.isfile(testf_d1):
    if hwrf_vortex == 'yes':
        logger.info("Previous HWRF " + str(gesfhr_fgat) +
                    "-h forecast vortex (warm start) is located at " +
                    str(testf_d1))
    else:
        logger.info(str(coldvortex) + " vortex (cold start) is located at " +
                    str(testf_d1))
    nmmbUsh.delete_file("fort.*")
    remove_file(path.join(tmpdir, "flag_file"))
    remove_file(path.join(tmpdir, "flag_file2"))
    produtil.fileop.fortlink({14: path.join(tmpdir, "storm_size_p"),
                              11: path.join(tmpdir, "tcvitals.as"),
                              12: path.join(tmpdir, "hdas_atcfunix"),
                              26: path.join(tmpdir, "gfs_env"),
                              36: path.join(tmpdir, "wrf_env_new"),
                              71: path.join(tmpdir, "storm_pert_new"),
                              46: path.join(tmpdir, "roughness1"),
                              56: path.join(tmpdir, "new_data_4x"),
                              23: path.join(tmpdir, "storm_sym")},
                             logger=logger, force=True)
    # Prefer the combined track file when present and non-empty.
    if (path.isfile(path.join(tmpdir, "trak.fnl.all")) and
            path.getsize(path.join(tmpdir, "trak.fnl.all")) > 0):
        produtil.fileop.fortlink({30: path.join(tmpdir, "trak.fnl.all")},
                                 logger=logger, force=True)
    else:
        produtil.fileop.fortlink({30: path.join(tmpdir, "trak.fnl.all_gfs_06")},
                                 logger=logger, force=True)
    nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_anl_4x_step2.x"),
                     str(gesfhr_fgat), str(gfs_flag), str(initopt))
    deliver_file(path.join(tmpdir, "storm_radius"),
                 path.join(tmpdir, "storm_radius_1"), logger=logger)

    # cold start: using GFS vortex. The step2 executable writes flag_file2
    # when the previous-cycle vortex is unusable.
    testf2 = path.join(tmpdir, "flag_file2")
    if path.isfile(testf2) and gfs_flag > 2 and MODIN == "GFS":
        gfs_flag = 0
        nmmbUsh.delete_file("fort.*")
        remove_file(path.join(tmpdir, "flag_file"))
        remove_file(path.join(tmpdir, "flag_file2"))
        remove_file(path.join(tmpdir, "storm_pert_new"))
        deliver_file(path.join(tmpdir, "storm_pert_gfs"),
                     path.join(tmpdir, "storm_pert"), logger=logger)
        deliver_file(path.join(tmpdir, "storm_radius_gfs"),
                     path.join(tmpdir, "storm_radius"), logger=logger)
        deliver_file(path.join(tmpdir, "atcfunix"),
                     path.join(tmpdir, "hdas_atcfunix"), logger=logger)
        deliver_file(path.join(tmpdir, "roughness2"),
                     path.join(tmpdir, "roughness1"), logger=logger)
        produtil.fileop.fortlink({12: path.join(tmpdir, "atcfunix"),
                                  11: path.join(tmpdir, "tcvitals.as"),
                                  26: path.join(tmpdir, "gfs_env"),
                                  71: path.join(tmpdir, "storm_pert"),
                                  58: path.join(tmpdir, "storm_pert_new"),
                                  14: path.join(tmpdir, "storm_size_p"),
                                  23: path.join(tmpdir, "storm_sym"),
                                  46: path.join(tmpdir, "roughness1"),
                                  65: path.join(tmpdir, "storm_radius"),
                                  35: path.join(tmpdir, "storm_pert_step1_1")},
                                 logger=logger, force=True)
        gesfhr = 6
        # run executable hmon_pert_ct.x
        nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_pert_ct.x"), gesfhr)
        nmmbUsh.delete_file("fort.*")
        produtil.fileop.fortlink({14: path.join(tmpdir, "storm_size_p"),
                                  11: path.join(tmpdir, "tcvitals.as"),
                                  12: path.join(tmpdir, "atcfunix"),
                                  26: path.join(tmpdir, "gfs_env"),
                                  36: path.join(tmpdir, "wrf_env_new"),
                                  71: path.join(tmpdir, "storm_pert_new"),
                                  46: path.join(tmpdir, "roughness2"),
                                  56: path.join(tmpdir, "new_data_4x"),
                                  23: path.join(tmpdir, "storm_sym")},
                                 logger=logger, force=True)
        gesfhr = 6
        # run the executable hmon_anl_4x_step2.x
        nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_anl_4x_step2.x"),
                         gesfhr_fgat, gfs_flag, initopt)
        deliver_file(path.join(tmpdir, "storm_radius"),
                     path.join(tmpdir, "storm_radius_1"), logger=logger)

    # flag_file present => run the cold-start 10 m analysis correction.
    testf1 = path.join(tmpdir, "flag_file")
    if path.isfile(testf1):
        # BUGFIX: testf1 is already the full tmpdir path; the original
        # re-joined it with tmpdir (wrong if tmpdir is relative).
        with open(testf1, 'r') as f:
            print(f.readline())
        nmmbUsh.delete_file("fort.*")
        remove_file(path.join(tmpdir, "new_data_4x"))
        produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                                  26: path.join(tmpdir, "wrf_env_new")},
                                 logger=logger, force=True)
        produtil.fileop.fortcopy({71: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  72: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  73: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  74: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  75: path.join(fixdir, "hmon_storm_20"),
                                  76: path.join(fixdir, "hmon_storm_20"),
                                  77: path.join(fixdir, "hmon_storm_20"),
                                  78: path.join(fixdir, "hmon_storm_cyn_axisy_47")},
                                 logger=logger)
        # fortcopy makes real files; the executable needs them writable.
        for f in glob.glob("fort.7*"):
            os.chmod(f, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
        produtil.fileop.fortlink({85: path.join(tmpdir, "storm_radius"),
                                  56: path.join(tmpdir, "new_data_4x"),
                                  46: path.join(tmpdir, "roughness1"),
                                  25: path.join(tmpdir, "test_data"),
                                  23: path.join(tmpdir, "storm_sym")},
                                 logger=logger, force=True)
        # run the executable hmon_anl_cs_10m.x
        nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_anl_cs_10m.x"),
                         str(gesfhr), str(iflag_cold))

    # flag_file2 present => fall back to the bogus vortex.
    testf2 = path.join(tmpdir, "flag_file2")
    if path.isfile(testf2):
        with open(path.join(tmpdir, "flag_file2"), 'r') as fin:
            logger.info(fin.read())
        nmmbUsh.delete_file("fort.*")
        remove_file(path.join(tmpdir, "new_data_4x"))
        produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                                  26: path.join(tmpdir, "gfs_env"),
                                  36: path.join(tmpdir, "data_4x_gfs"),
                                  61: path.join(tmpdir, "storm_pert_gfs")},
                                 logger=logger, force=True)
        produtil.fileop.fortcopy({71: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  72: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  73: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  74: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  75: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                                  76: path.join(fixdir, "hmon_storm_20"),
                                  77: path.join(fixdir, "hmon_storm_20"),
                                  78: path.join(fixdir, "hmon_storm_cyn_axisy_47")},
                                 logger=logger)
        # BUGFIX: os.chmod does not expand globs; the original
        # os.chmod("fort.7*", ...) raised FileNotFoundError. Use the same
        # glob loop as the sibling branches.
        for f in glob.glob("fort.7*"):
            os.chmod(f, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
        produtil.fileop.fortlink({85: path.join(tmpdir, "storm_radius_gfs"),
                                  46: path.join(tmpdir, "roughness2"),
                                  56: path.join(tmpdir, "new_data_4x"),
                                  25: path.join(tmpdir, "test_data")},
                                 logger=logger, force=True)
        # run executable hmon_anl_bogus_10m.x
        nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_anl_bogus_10m.x"),
                         gesfhr)
        # BUGFIX(suspected copy-paste): the original delivered the
        # executable hmon_anl_bogus_10m.x itself into storm_radius_1.
        # The GFS storm radius (unit 85 above) is the sensible source —
        # confirm against the reference HWRF relocate script.
        deliver_file(path.join(tmpdir, "storm_radius_gfs"),
                     path.join(tmpdir, "storm_radius_1"), logger=logger)

    # NOTE(review): testf_d3 is defined outside this chunk — presumably a
    # third-domain/ghost flag file; confirm where it is set.
    if path.isfile(testf_d3):
        iflag_ghost = 0
    else:
        iflag_ghost = 1
else:
    # No storm_pert_new: cold start with the bogus vortex.
    logger.info("This is a cold start, " + str(gesfhr_fgat) + "-h HWRF or " +
                str(coldvortex) + " vortex is absent")
    logger.info("Using bogus vortex")
    warm_cold_flag = "cold"
    nmmbUsh.delete_file("fort.*")
    produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                              26: path.join(tmpdir, "gfs_env"),
                              36: path.join(tmpdir, "data_4x_gfs"),
                              61: path.join(tmpdir, "storm_pert_gfs")},
                             logger=logger, force=True)
    produtil.fileop.fortcopy({71: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                              72: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                              73: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                              74: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                              75: path.join(fixdir, "hmon_storm_cyn_axisy_47"),
                              76: path.join(fixdir, "hmon_storm_20"),
                              77: path.join(fixdir, "hmon_storm_20"),
                              78: path.join(fixdir, "hmon_storm_cyn_axisy_47")},
                             logger=logger)
    for f in glob.glob("fort.7*"):
        os.chmod(f, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
    produtil.fileop.fortlink({85: path.join(tmpdir, "storm_radius_gfs"),
                              46: path.join(tmpdir, "roughness2"),
                              56: path.join(tmpdir, "new_data_4x"),
                              25: path.join(tmpdir, "test_data")},
                             logger=logger, force=True)
    gesfhr = 6
    # run executable hmon_anl_bogus_10m.x
    nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_anl_bogus_10m.x"), gesfhr)

logger.info("#######################################")
# ------------------------------------------------------------------
# Post-relocation stage: interpolate the adjusted vortex onto the
# nest (or ghost) domains, splice the merged fields back into the
# model input files, report the CYCLING STATUS line, and deliver the
# final inputs to the com directory.
#
# NOTE(review): reconstructed from a line-collapsed paste; statement
# order is preserved, nesting inferred — confirm against the original.
logger.info(str(GSID02) + " " + str(GSID03))
if GSID02 == "NO" and GSID03 == "NO":
    # interpolate to d02
    nmmbUsh.delete_file("fort*")
    produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                              26: path.join(tmpdir, "new_data_4x"),
                              36: path.join(tmpdir, "new_gfs_d02"),
                              46: path.join(tmpdir, "new_gfs_d01"),
                              56: path.join(tmpdir, "data_merge_d02")},
                             logger=logger, force=True)
    # run the executable hmon_inter_2to2.x
    gesfhr = 6
    iflag = 1
    nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_inter_2to2.x"),
                     gesfhr, iflag)
    # interpolate to d03
    nmmbUsh.delete_file("fort.*")
    produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                              26: path.join(tmpdir, "new_data_4x"),
                              36: path.join(tmpdir, "new_gfs_d03"),
                              46: path.join(tmpdir, "new_gfs_d01"),
                              56: path.join(tmpdir, "data_merge_d03")},
                             logger=logger, force=True)
    # run the executable hmon_inter_2to2.x
    nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_inter_2to2.x"),
                     gesfhr, iflag)
else:
    # interpolate new_data_4x to ghost_d02, ghost_d03
    if GSID02 == "YES":
        nmmbUsh.delete_file("fort.*")
        produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                                  26: path.join(tmpdir, "new_data_4x"),
                                  36: path.join(tmpdir, "new_ght_d02"),
                                  46: path.join(tmpdir, "new_gfs_d01"),
                                  56: path.join(tmpdir, "data_merge_g02")},
                                 logger=logger, force=True)
        gesfhr = 6
        iflag = 1
        # run the executable hmon_inter_2to2.x
        nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_inter_2to2.x"),
                         gesfhr, iflag)

# Merge the adjusted vortex into the parent domain (d01).
nmmbUsh.delete_file("fort*")
remove_file(path.join(tmpdir, "flag_file"))
produtil.fileop.fortlink({11: path.join(tmpdir, "tcvitals.as"),
                          36: path.join(tmpdir, "new_data_4x"),
                          46: path.join(tmpdir, "new_gfs_d01"),
                          26: path.join(tmpdir, "new_gfs_d01"),
                          85: path.join(tmpdir, "storm_radius_gfs"),
                          56: path.join(tmpdir, "data_merge_d01")},
                         logger=logger, force=True)
gesfhr = 6
# run executable hmon_inter_4to6.x
nmmbUsh.hmon_pgm(logger, path.join(tmpdir, "hmon_inter_4to6.x"), gesfhr)
nmmbUsh.delete_file("fort*")
deliver_file(path.join(tmpdir, "storm_radius_gfs"),
             path.join(tmpdir, "storm_radius"), logger=logger)

# replace updated model files in wrf files
remove_file("itag1")
remove_file("itag3")
# NOTE(review): by Python precedence this condition reduces to
# GSID02 == "YES" (the IO_FORM conjunct is dead). The intent may have
# been `IO_FORM != 2 and (GSID02 == "YES" or GSID03 == "YES")` —
# confirm before changing; behavior is preserved here.
if IO_FORM != 2 and GSID02 == "YES" or GSID02 == "YES":
    deliver_file("./namelist_analysis0.input", "./namelist.input",
                 logger=logger)
if IO_FORM == 2:
    nmmbUsh.update_3dvar(logger, 1)
else:
    nmmbUsh.nems_update(logger, 1, "wrfinput", "data_merge", 1)
if GSID02 == "NO" and GSID03 == "NO":
    if IO_FORM == 2:
        cmd = exe("./hmon_diffwrf_3dvar.exe")["3dvar_update"]["wrfinput_d02"]["data_merge_d02"]
        produtil.run.checkrun(cmd, logger=logger)
        cmd = exe("./hmon_diffwrf_3dvar.exe")["3dvar_update"]["wrfinput_d03"]["data_merge_d03"]
        produtil.run.checkrun(cmd, logger=logger)
    else:
        nmmbUsh.nems_update(logger, 2, "wrfinput", "data_merge", 2)
        nmmbUsh.nems_update(logger, 3, "wrfinput", "data_merge", 3)
if GSID02 == 'YES':
    if IO_FORM == 2:
        nmmbUsh.update_3dvar(logger, 2)
    else:
        deliver_file("./namelist_ghost.input", "./namelist.input",
                     logger=logger)
        nmmbUsh.nems_update(logger, 3, "wrfghost", "update", 2)
if GSID03 == 'YES':
    if IO_FORM == 2:
        nmmbUsh.update_3dvar(logger, 3)
    else:
        deliver_file("./namelist_ghost.input", "./namelist.input",
                     logger=logger)
        logger.info("./nems_bin_io.exe itag wrfinput_d03 update data_merge_d03")
        nmmbUsh.nems_update(logger, 3, "wrfghost", "update", 3)

deliver_file(path.join(tmpdir, "tcvitals.as"),
             savdir + "/tcvitals." + yyyymmddhh, logger=logger)
if GSID02 == "YES":
    deliver_file("./wrfghost_d02", "./wrfghost_d02_gsi", logger=logger)
if GSID03 == "YES":
    deliver_file("./wrfghost_d03", "./wrfghost_d03_gsi", logger=logger)
if GSID02 == "NO" and GSID03 == "NO":
    # NOTE(review): 'storm', 'im', 'id', 'ih' are not defined in this
    # chunk — presumably storm id and month/day/hour strings set earlier
    # in the file (cf. mm/dd/hh above); confirm, and note that 'id'
    # shadows the builtin.
    atmos_d = path.join(DATA, storm + "." + yyyymmddhh + "/ATMOS")
    nmmbUsh.removeall(atmos_d)
    makedirs(atmos_d)
    deliver_file("./wrfinput_d01", path.join(atmos_d, "wrfinput_d01"),
                 logger=logger)
    deliver_file("./wrfinput_d02",
                 path.join(atmos_d, "wrfanl_d02_" + yyyy + "-" + im + "-" +
                           id + "_" + ih + ":00:00"), logger=logger)
    deliver_file("./wrfinput_d03",
                 path.join(atmos_d, "wrfanl_d03_" + yyyy + "-" + im + "-" +
                           id + "_" + ih + ":00:00"), logger=logger)

# Output the "CYCLING STATUS" line. This line is parsed by the para
# automation scripts so it must be correct. They must match the
# hwrf_fgat_relocate_kick.shell script.
if warm_cold_flag == "warm":
    # Warm start: cycling was used
    logger.info("CYCLING STATUS: WARM START")
elif warm_cold_flag == "cold":
    # This is a cold start. Should it have been a cold start?
    if cold_ok == 'yes':
        # Intentional cold start, such as for a weak vortex
        logger.info("CYCLING STATUS: CYCLING VETOED")
        # There is a prior cycle and cycling was not vetoed, so this
        # should have been a warm start. It is a cold start, so that
        # means there is an error. (Shell remnant kept for reference:)
        # echo CYCLING STATUS: UNEXPECTED COLD START
        # echo A prior cycle was present, but this is a cold start,
        # echo and the cold_ok variable is set to $cold_ok instead of yes.
        # echo This is an error, unless you intentionally started mid-storm.
    else:
        # No prior cycle so cycling is impossible
        logger.info("CYCLING STATUS: NO PRIOR CYCLE")
else:
    # Script error: the cycling status is uninitialized.
    # BUGFIX: the original logged the shell literal "$warm_cold_flag"
    # instead of the variable's value.
    logger.info("CYCLING STATUS: UNKNOWN " + str(warm_cold_flag) + " " +
                str(cold_ok))

logger.info("relocate copy data to com directory")
atmos_d = comdir
produtil.fileop.deliver_file("./wrfinput_d01",
                             atmos_d + "/" + storm_name + STORM_ID + "." +
                             cycle + "." + "input_domain_01_nemsio",
                             logger=logger)
produtil.fileop.deliver_file("./wrfinput_d02",
                             atmos_d + "/" + storm_name + STORM_ID + "." +
                             cycle + "." + "input_domain_02_nemsio",
                             logger=logger)
produtil.fileop.deliver_file("./wrfinput_d03",
                             atmos_d + "/" + storm_name + STORM_ID + "." +
                             cycle + "." + "input_domain_03_nemsio",
                             logger=logger)
logger.info("relocate finished")