'''
Program Name: build_webpage.py
Contact(s): Mallory Row
Abstract: This is run at the end of all step2 scripts in scripts/.
          This creates a job card to:
              1) if needed, create website from EMC_verif-global
                 template (webpage.tar) at specified user location
                 on web server
              2) send images to web server
          It then submits to the transfer queue.

NOTE(review): this file was recovered from a whitespace-collapsed copy in
which the HTML/PHP string contents of the webpage-template write() calls
were stripped (most writes below emit only '\n'). The control flow and
file/command plumbing are faithful to the recovered source, but the
template markup must be restored from the EMC_verif-global upstream
webpage template source — TODO confirm against upstream.
'''

import os
import datetime
import glob
import shutil

print("BEGIN: "+os.path.basename(__file__))

# Read in environment variables (all are required; a missing one is a
# configuration error and should fail loudly with KeyError)
KEEPDATA = os.environ['KEEPDATA']
machine = os.environ['machine']
USHverif_global = os.environ['USHverif_global']
DATA = os.environ['DATA']
NET = os.environ['NET']
RUN = os.environ['RUN']
RUN_type = RUN.split('_')[0]
QUEUESERV = os.environ['QUEUESERV']
ACCOUNT = os.environ['ACCOUNT']
PARTITION_BATCH = os.environ['PARTITION_BATCH']
webhost = os.environ['webhost']
webhostid = os.environ['webhostid']
webdir = os.environ['webdir']
print("Webhost: "+webhost)

if RUN == 'fit2obs_plots':
    # fit2obs runs from a deeper working directory and a deeper web
    # directory; strip those suffixes to get the common roots
    DATA = DATA.replace('/fit2obs_plots/data', '')
    webdir = webdir.replace(
        '/fits/horiz/'+os.environ['fit2obs_plots_expnlist'].split(' ')[1],
        ''
    )
    web_fits_dir = os.path.join(DATA, RUN, 'fit2obs', 'web', 'fits')
    # Count every .png under the fits tree (recursively)
    nimages = 0
    for root, dirs, files in os.walk(web_fits_dir, topdown=False):
        nimages = nimages + len(glob.glob(os.path.join(root, '*.png')))
    print("Webhost location: "+webdir)
    print("\nTotal images within "+web_fits_dir+": "+str(nimages))
else:
    # All other RUNs keep their images flat under metplus_output/images
    image_list = os.listdir(
        os.path.join(DATA, RUN, 'metplus_output', 'images')
    )
    nimages = len(image_list)
    print("Webhost location: "+webdir)
    print("\nTotal images in "
          +os.path.join(DATA, RUN, 'metplus_output', 'images')+": "
          +str(nimages))

# Set up job wall time information: 180 minutes, expressed as a time-of-day
# object so it can be formatted as HH:MM:SS for qsub/sbatch
web_walltime = '180'
walltime = (datetime.datetime.min
            + datetime.timedelta(minutes=int(web_walltime))).time()


# Create webpage templates for tropcyc
def tropcyc_write_template_header(template_filename):
    """!Writes common webpage header information to template

        Args:
            template_filename - string of the full file path to write to

        Returns:
    """
    # NOTE(review): the header markup was stripped during file corruption;
    # restore the real HTML/PHP lines from the upstream webpage template
    with open(template_filename, 'w') as template_file:
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('Home\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')


def tropcyc_write_template_body1(template_filename):
    """!Writes common webpage body information to template
        before the javascript domain assignment portion

        Args:
            template_filename - string of the full file path to write to

        Returns:
    """
    # NOTE(review): most body markup was stripped during file corruption;
    # only the plain-text fragments below survived
    with open(template_filename, 'a') as template_file:
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write(
            'Left/Right arrow keys = Change forecast lead | Up/Down arrow keys '
            +'= Change Storm\n'
        )
        template_file.write(
            'For information on tropical cyclone verification, '
            +'\n'
        )
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write(' \n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')
        template_file.write('\n')


def tropcyc_write_template_body2(template_filename):
    """!Writes common webpage body information to template
        after the javascript domain assignment portion

        Args:
            template_filename - string of the full file path to write to

        Returns:
    """
    # NOTE(review): the original definition of this function appears to
    # have been lost in the file corruption (it is called below but no
    # definition survived). Reconstructed minimally so the tropcyc branch
    # does not raise NameError; restore the real closing markup from the
    # upstream webpage template source — TODO confirm.
    with open(template_filename, 'a') as template_file:
        template_file.write('\n')


if RUN == 'tropcyc':
    import get_tc_info
    config_storm_list = os.environ['tropcyc_storm_list'].split(' ')
    # Check storm_list to see if all storms for basin and year
    # requested
    tc_dict = get_tc_info.get_tc_dict()
    storm_list = []
    for config_storm in config_storm_list:
        config_storm_basin = config_storm.split('_')[0]
        config_storm_year = config_storm.split('_')[1]
        config_storm_name = config_storm.split('_')[2]
        if config_storm_name == 'ALLNAMED':
            # Expand BASIN_YEAR_ALLNAMED to every known storm whose key
            # contains "BASIN_YEAR"
            for byn in list(tc_dict.keys()):
                if config_storm_basin+'_'+config_storm_year in byn:
                    storm_list.append(byn)
        else:
            storm_list.append(config_storm)
    # Group storms by basin
    AL_storm_list, CP_storm_list, EP_storm_list, WP_storm_list = \
        [], [], [], []
    for storm in storm_list:
        basin = storm.split('_')[0]
        if basin == 'AL':
            AL_storm_list.append(storm)
        elif basin == 'CP':
            CP_storm_list.append(storm)
        elif basin == 'EP':
            EP_storm_list.append(storm)
        elif basin == 'WP':
            WP_storm_list.append(storm)
    basin_storms_dict = {
        'AL': AL_storm_list,
        'CP': CP_storm_list,
        'EP': EP_storm_list,
        'WP': WP_storm_list
    }
    # Create track and intensity error templates
    trackerr_template_dir = os.path.join(DATA, RUN,
                                         'create_webpage_templates',
                                         'trackerr')
    if not os.path.exists(trackerr_template_dir):
        os.makedirs(trackerr_template_dir)
    intensityerr_template_dir = os.path.join(DATA, RUN,
                                             'create_webpage_templates',
                                             'intensityerr')
    if not os.path.exists(intensityerr_template_dir):
        os.makedirs(intensityerr_template_dir)
    for basin in list(basin_storms_dict.keys()):
        basin_trackerr_filename = os.path.join(trackerr_template_dir,
                                               'trackerr_'+basin+'.php')
        basin_intensityerr_filename = os.path.join(
            intensityerr_template_dir, 'intensityerr_'+basin+'.php'
        )
        tropcyc_write_template_header(basin_trackerr_filename)
        tropcyc_write_template_header(basin_intensityerr_filename)
        tropcyc_write_template_body1(basin_trackerr_filename)
        tropcyc_write_template_body1(basin_intensityerr_filename)
        # Append one javascript domains.push({...}) entry per storm to
        # both the track-error and intensity-error templates
        with open(basin_trackerr_filename, 'a') as basin_trackerr_file, \
                open(basin_intensityerr_filename, 'a') \
                as basin_intensityerr_file:
            for storm in basin_storms_dict[basin]:
                # NOTE: rebinding "basin" here shadows the outer loop
                # variable; harmless because all storms in this list
                # share the same basin (kept as in original)
                basin = storm.split('_')[0]
                year = storm.split('_')[1]
                name = storm.split('_')[2]
                basin_trackerr_file.write('domains.push({\n')
                basin_trackerr_file.write(
                    '    displayName: "'+name.title()+' ('+year+')",\n'
                )
                basin_trackerr_file.write('    name: "'+storm+'",\n')
                basin_trackerr_file.write('});\n')
                basin_intensityerr_file.write('domains.push({\n')
                basin_intensityerr_file.write(
                    '    displayName: "'+name.title()+' ('+year+')",\n'
                )
                basin_intensityerr_file.write('    name: "'+storm+'",\n')
                basin_intensityerr_file.write('});\n')
        tropcyc_write_template_body2(basin_trackerr_filename)
        tropcyc_write_template_body2(basin_intensityerr_filename)
elif RUN == 'fit2obs_plots':
    exp1 = os.environ['fit2obs_plots_expnlist'].split(' ')[0]
    exp2 = os.environ['fit2obs_plots_expnlist'].split(' ')[1]
    fit2obs_plots_dir = os.path.join(DATA, RUN)
    # Make globalvars.php files
    # NOTE(review): the PHP content of these files was stripped during
    # file corruption; restore from upstream
    for stat in ['bias', 'rmse']:
        stat_globvars_filename = os.path.join(fit2obs_plots_dir,
                                              stat+'_globalvars.php')
        with open(stat_globvars_filename, 'a') as stat_globvars_file:
            stat_globvars_file.write("\n")
            stat_globvars_file.write('\n')
            stat_globvars_file.write('\n')
            stat_globvars_file.write('\n')
    # Rename fit2obs images: copy from the fit2obs web tree into one flat
    # images/ directory using a "<stat>_<obtype>_<var>_<level>_<plot>_
    # <region>.png" naming scheme the webpage expects
    src_images_dir = os.path.join(DATA, RUN, 'fit2obs', 'web', 'fits')
    dest_images_dir = os.path.join(fit2obs_plots_dir, 'images')
    if not os.path.exists(dest_images_dir):
        os.makedirs(dest_images_dir)
    # --- Time-series plots ---
    plot_type_list = [exp1, exp2, 'f00af06', 'f12af36', 'f24af48',
                      'timeout']
    region_list = ['gl', 'nh', 'sh', 'tr', 'na', 'us', 'eu', 'as', 'all']
    ob_type_dict = {
        'ADPUPA': 'adp',
        'ADPSFC': 'sfc',
        'SFCSHP': 'shp',
        'AIRCFT': 'acft',
        'AIRCAR': 'acar'
    }
    for plot_type in plot_type_list:
        src_plot_type_dir = os.path.join(src_images_dir, 'time', plot_type)
        for ob_type in list(ob_type_dict.keys()):
            # Variables/levels available differ by observation type
            if ob_type == 'ADPUPA':
                var_list = ['t', 'z', 'q', 'w']
                level_list = ['1000', '925', '850', '700', '500', '400',
                              '300', '250', '200', '150', '100', '70',
                              '50', '30', '20']
            elif ob_type in ['ADPSFC', 'SFCSHP']:
                var_list = ['t', 'p', 'q', 'w']
                level_list = ['sfc']
            elif ob_type in ['AIRCAR', 'AIRCFT']:
                var_list = ['t', 'w']
                level_list = ['1000', '700', '300']
            for var in var_list:
                for level in level_list:
                    # Surface plots have no level token in the source
                    # file names
                    if level == 'sfc':
                        level_original = ''
                    else:
                        level_original = level
                    for region in region_list:
                        rmse_src = os.path.join(
                            src_plot_type_dir,
                            var+level_original+'.'+region+'.'
                            +ob_type_dict[ob_type]+'.png'
                        )
                        rmse_dest = os.path.join(
                            dest_images_dir,
                            'rmse_'+ob_type+'_'+var+'_'+level+'_'
                            +plot_type+'_'+region+'.png'
                        )
                        # Bias source files carry a 'b' after the variable
                        bias_src = os.path.join(
                            src_plot_type_dir,
                            var+'b'+level_original+'.'+region+'.'
                            +ob_type_dict[ob_type]+'.png'
                        )
                        bias_dest = os.path.join(
                            dest_images_dir,
                            'bias_'+ob_type+'_'+var+'_'+level+'_'
                            +plot_type+'_'+region+'.png'
                        )
                        # Best-effort: not every combination is plotted
                        if os.path.exists(rmse_src):
                            shutil.copy(rmse_src, rmse_dest)
                        if os.path.exists(bias_src):
                            shutil.copy(bias_src, bias_dest)
    # --- Horizontal map plots ---
    src_horizontal_dir = os.path.join(src_images_dir, 'horiz')
    plot_type_list = ['f00', 'f06', 'f12', 'f24', 'f36', 'f48',
                      'all-'+exp1, 'all-'+exp2]
    region_list = ['us', 'eu', 'as']
    var_list = ['t', 'z', 'q', 'w']
    level_list = ['925', '850', '700', '500', '200']
    for var in var_list:
        for level in level_list:
            for region in region_list:
                for plot_type in plot_type_list:
                    # NOTE(review): every plot type except 'all-<exp1>'
                    # (including the plain lead-time plots) is read from
                    # exp2's directory — looks intentional but confirm
                    # against the fit2obs output layout
                    if plot_type == 'all-'+exp1:
                        src_horizontal_plot_type_dir = os.path.join(
                            src_horizontal_dir, exp1
                        )
                    else:
                        src_horizontal_plot_type_dir = os.path.join(
                            src_horizontal_dir, exp2
                        )
                    if 'all' in plot_type:
                        rmse_src = os.path.join(
                            src_horizontal_plot_type_dir,
                            var+level+'.all.'+region+'.rmse.png'
                        )
                        bias_src = os.path.join(
                            src_horizontal_plot_type_dir,
                            var+level+'.all.'+region+'.bias.png'
                        )
                    else:
                        rmse_src = os.path.join(
                            src_horizontal_plot_type_dir,
                            var+level+'.'+plot_type+'.'+region+'.rmse.png'
                        )
                        bias_src = os.path.join(
                            src_horizontal_plot_type_dir,
                            var+level+'.'+plot_type+'.'+region+'.bias.png'
                        )
                    rmse_dest = os.path.join(
                        dest_images_dir,
                        'rmse_horizontal_'+var+'_'+level+'_'
                        +plot_type+'_'+region+'.png'
                    )
                    bias_dest = os.path.join(
                        dest_images_dir,
                        'bias_horizontal_'+var+'_'+level+'_'
                        +plot_type+'_'+region+'.png'
                    )
                    if os.path.exists(rmse_src):
                        shutil.copy(rmse_src, rmse_dest)
                    if os.path.exists(bias_src):
                        shutil.copy(bias_src, bias_dest)
    # --- Vertical profile / time-vertical plots ---
    src_timevrt_dir = os.path.join(src_images_dir, 'time', 'timevrt')
    src_vert_dir = os.path.join(src_images_dir, 'vert')
    plot_type_list = ['f00', 'f12', 'f24', 'f36', 'f48', 'f60', 'f72',
                      'f84', 'f96', 'f108', 'f120',
                      exp1+'_00Z', exp2+'_00Z', exp1+'_12Z', exp2+'_12Z',
                      'anl_ges_00Z', 'anl_ges_12Z', 'f12_f36', 'f24_f48']
    region_list = ['gl', 'nh', 'sh', 'tr', 'na', 'us', 'eu', 'as', 'all']
    level_list = ['t', 's']
    var_list = ['t', 'z', 'q', 'w']
    for var in var_list:
        for level in level_list:
            for region in region_list:
                for plot_type in plot_type_list:
                    if plot_type[0] == 'f' \
                            and len(plot_type.split('_')) == 1:
                        # Plain forecast lead (f00..f120): time-vertical
                        # plots; bias files carry a trailing 'b' on the
                        # variable token
                        rmse_src = os.path.join(
                            src_timevrt_dir,
                            level+var+'.'+region+'.'+plot_type[1:]+'.png'
                        )
                        bias_src = os.path.join(
                            src_timevrt_dir,
                            level+var+'b.'+region+'.'+plot_type[1:]+'.png'
                        )
                    elif plot_type \
                            in [exp1+'_00Z', exp2+'_00Z',
                                exp1+'_12Z', exp2+'_12Z']:
                        # Per-experiment, per-cycle vertical profiles
                        exp = plot_type.split('_')[0]
                        hr = plot_type.split('_')[1]
                        if hr == '00Z':
                            hr_original = '0z'
                        else:
                            hr_original = hr.lower()
                        # NOTE(review): rmse_src and bias_src are
                        # identical here (and in the branch below) —
                        # possibly a copy/paste bug in the original;
                        # confirm the real bias file name
                        rmse_src = os.path.join(
                            src_vert_dir, exp,
                            level+var+'.'+hr_original+'.'+region
                            +'.adp.png'
                        )
                        bias_src = os.path.join(
                            src_vert_dir, exp,
                            level+var+'.'+hr_original+'.'+region
                            +'.adp.png'
                        )
                    elif plot_type \
                            in ['anl_ges_00Z', 'anl_ges_12Z',
                                'f12_f36', 'f24_f48']:
                        # Experiment-difference vertical profiles
                        if 'anl' in plot_type:
                            rmse_src = os.path.join(
                                src_vert_dir, exp1+'-'+exp2,
                                level+var+'.f00.'
                                +plot_type.split('_')[2].lower()
                                +'.'+region+'.adp.png'
                            )
                            bias_src = os.path.join(
                                src_vert_dir, exp1+'-'+exp2,
                                level+var+'.f00.'
                                +plot_type.split('_')[2].lower()
                                +'.'+region+'.adp.png'
                            )
                        else:
                            rmse_src = os.path.join(
                                src_vert_dir, exp1+'-'+exp2,
                                level+var+'.'+plot_type.split('_')[0]
                                +'.'+region+'.adp.png'
                            )
                            bias_src = os.path.join(
                                src_vert_dir, exp1+'-'+exp2,
                                level+var+'.'+plot_type.split('_')[0]
                                +'.'+region+'.adp.png'
                            )
                    rmse_dest = os.path.join(
                        dest_images_dir,
                        'rmse_vertical_'+var+'_'+level+'_'
                        +plot_type+'_'+region+'.png'
                    )
                    bias_dest = os.path.join(
                        dest_images_dir,
                        'bias_vertical_'+var+'_'+level+'_'
                        +plot_type+'_'+region+'.png'
                    )
                    if os.path.exists(rmse_src):
                        shutil.copy(rmse_src, rmse_dest)
                    if os.path.exists(bias_src):
                        shutil.copy(bias_src, bias_dest)

# Create job card: a shell script that (1) creates the remote web
# directory from webpage.tar if it does not exist, (2) scps the images
# (and any templates) to the web host, (3) optionally cleans up DATA
web_job_filename = os.path.join(DATA, 'batch_jobs', NET+'_'+RUN+'_web.sh')
with open(web_job_filename, 'a') as web_job_file:
    web_job_file.write('#!/bin/sh'+'\n')
    web_job_file.write('set -x'+'\n')
    if machine == 'WCOSS2':
        web_job_file.write('cd $PBS_O_WORKDIR\n')
    # Probe the remote directory; a non-zero exit means it must be
    # created and seeded from the webpage.tar template
    web_job_file.write('ssh -q -l '+webhostid+' '+webhost+' " ls -l '
                       +webdir+' "'+'\n')
    web_job_file.write('if [ $? -ne 0 ]; then'+'\n')
    web_job_file.write('    echo "Making directory '+webdir+'"'+'\n')
    web_job_file.write('    ssh -q -l '+webhostid+' '+webhost
                       +' "mkdir -p '+webdir+' "'+'\n')
    web_job_file.write('    sleep 30\n')
    web_job_file.write('    scp -q '
                       +os.path.join(USHverif_global, 'webpage.tar')+' '
                       +webhostid+'@'+webhost+':'+webdir+'/.'+'\n')
    web_job_file.write('    ssh -q -l '+webhostid+' '+webhost
                       +' "cd '+webdir+' ; tar -xvf webpage.tar "'+'\n')
    web_job_file.write('    ssh -q -l '+webhostid+' '+webhost
                       +' "rm '+os.path.join(webdir, 'webpage.tar')
                       +' "'+'\n')
    web_job_file.write('fi'+'\n')
    web_job_file.write('\n')
    if RUN == 'fit2obs_plots':
        web_job_file.write('scp -r '+os.path.join(DATA, RUN, 'images')
                           +' '+webhostid+'@'+webhost+':'
                           +os.path.join(webdir, RUN_type, '.')+'\n')
    else:
        web_job_file.write('scp -r '
                           +os.path.join(DATA, RUN, 'metplus_output',
                                         'images')
                           +' '+webhostid+'@'+webhost+':'
                           +os.path.join(webdir, RUN_type, '.')+'\n')
    if RUN == 'tropcyc':
        for tropcyc_type in ['intensityerr', 'trackerr']:
            web_job_file.write(
                'scp -r '+os.path.join(DATA, RUN,
                                       'create_webpage_templates',
                                       tropcyc_type,
                                       tropcyc_type+'*.php')
                +' '+webhostid+'@'+webhost+':'
                +os.path.join(webdir, RUN_type, tropcyc_type, '.\n')
            )
    elif RUN == 'fit2obs_plots':
        for stat in ['bias', 'rmse']:
            web_job_file.write(
                'scp -r '+os.path.join(DATA, RUN,
                                       stat+'_globalvars.php')
                +' '+webhostid+'@'+webhost+':'
                +os.path.join(webdir, RUN_type, stat, '.\n')
            )
    if KEEPDATA == 'NO':
        # Clean up the RUN working directory once the transfer is done
        web_job_file.write('\n')
        web_job_file.write('cd ..\n')
        web_job_file.write('rm -rf '+RUN)

# Submit job card to the transfer queue
os.chmod(web_job_filename, 0o755)
web_job_output = web_job_filename.replace('.sh', '.out')
web_job_name = web_job_filename.rpartition('/')[2].replace('.sh', '')
print("Submitting "+web_job_filename+" to "+QUEUESERV)
print("Output sent to "+web_job_output)
if machine == 'WCOSS2':
    os.system('qsub -V -l walltime='+walltime.strftime('%H:%M:%S')+' '
              +'-q '+QUEUESERV+' -A '+ACCOUNT+' -o '+web_job_output+' '
              +'-e '+web_job_output+' -N '+web_job_name+' '
              +'-l select=1:ncpus=1 '+web_job_filename)
elif machine == 'HERA':
    os.system('sbatch --ntasks=1 --time='
              +walltime.strftime('%H:%M:%S')+' '
              +'--partition='+QUEUESERV+' --account='+ACCOUNT+' '
              +'--output='+web_job_output+' '
              +'--job-name='+web_job_name+' '+web_job_filename)
elif machine == 'JET':
    if webhost == 'emcrzdm.ncep.noaa.gov':
        # Jet cannot reach the EMC web server directly
        print("ERROR: Currently "+machine.title()+" cannot connect to "
              +webhost)
    else:
        os.system('sbatch --ntasks=1 --time='
                  +walltime.strftime('%H:%M:%S')+' '
                  +'--partition='+QUEUESERV+' --account='+ACCOUNT+' '
                  +'--output='+web_job_output+' '
                  +'--job-name='+web_job_name+' '+web_job_filename)
elif machine in ["ORION", "HERCULES", "S4"]:
    if webhost == 'emcrzdm.ncep.noaa.gov':
        print("ERROR: Currently " + machine + " cannot connect to "
              +webhost)
    else:
        os.system('sbatch --ntasks=1 --time='
                  +walltime.strftime('%H:%M:%S')+' '
                  +'--partition='+QUEUESERV+' --account='+ACCOUNT+' '
                  +'--output='+web_job_output+' '
                  +'--job-name='+web_job_name+' '+web_job_filename)

print("END: "+os.path.basename(__file__))