## If you have multiple MPI builds on a single machine,
## this example may be relevant to you.
## Ex: The hydro-c1 machine has MPI and netCDF built against Portland
## Fortran in the PATH. However, MPI and netCDF built against Intel
## Fortran are also available. Here's how I build WRF-Hydro against
## Intel.
## Below, in this file, I make the changes:
## COMPILER90 = $(ifortCompiler90)
## LDFLAGS = $(ifortLdFlags)
## NETCDFINC = $(ifortNetcdfInc)
## NETCDFLIB = -L$(ifortNetcdfLib) -lnetcdff -lnetcdf
## In my ~/.bashrc I have:
## ## WRF HYDRO
## export NETCDF=/opt/netcdf
## export WRF_HYDRO=1
## export HYDRO_D=1
## ### manage ifort on hydro
## export ifortNetcdfLib="/opt/netcdf-4.3.0+ifort-12.1/lib/"
## export ifortNetcdfInc="/opt/netcdf-4.3.0+ifort-12.1/include/"
## # RPATH for ifort (pgi is already the default, so no need there)
## ifortMpiLib="/opt/openmpi-1.10.0-intel/lib/"
## export ifortLdFlags="-Wl,-rpath,${ifortNetcdfLib}:${ifortMpiLib} -L${ifortNetcdfLib} -L${ifortMpiLib}"
## export ifortCompiler90='/opt/openmpi-1.10.0-intel/bin/mpif90'
## # Aliases for invoking ifort
## alias impirun='/opt/openmpi-1.10.0-intel/bin/mpirun'
## alias iman='man -M/opt/openmpi+intel/man'
## # Bonus: check your WRF-Hydro environment - it is up to you to maintain this to suit your needs.
## alias henv='printenv | egrep -i "(HYDRO|NUDG|PRECIP|CHAN_CONN|^NETCDF|^LDFLAGS|^ifort)" | egrep -v PWD'

.IGNORE:

# Translate environment switches (variables set to 1) into the corresponding
# preprocessor defines used in CPPFLAGS below.

ifeq ($(SPATIAL_SOIL),1)
    SPATIAL_SOIL = -DSPATIAL_SOIL
else
    SPATIAL_SOIL =
endif

ifeq ($(HYDRO_REALTIME),1)
    HYDRO_REALTIME = -DHYDRO_REALTIME
else
    HYDRO_REALTIME =
endif

ifeq ($(WRF_HYDRO),1)
    WRF_HYDRO = -DWRF_HYDRO $(HYDRO_REALTIME)
else
    WRF_HYDRO =
endif

ifeq ($(WRF_HYDRO_RAPID),1)
    WRF_HYDRO = -DWRF_HYDRO -DWRF_HYDRO_RAPID $(HYDRO_REALTIME)
endif

ifeq ($(HYDRO_D),1)
    HYDRO_D = -DHYDRO_D $(WRF_HYDRO)
else
    HYDRO_D = $(WRF_HYDRO)
endif

ifeq ($(WRF_HYDRO_NUDGING),1)
    WRF_HYDRO_NUDGING = -DWRF_HYDRO_NUDGING
else
    WRF_HYDRO_NUDGING =
endif

ifeq ($(OUTPUT_CHAN_CONN),1)
    OUTPUT_CHAN_CONN = -DOUTPUT_CHAN_CONN
else
    OUTPUT_CHAN_CONN =
endif

ifeq ($(PRECIP_DOUBLE),1)
    PRECIP_DOUBLE = -DPRECIP_DOUBLE
else
    PRECIP_DOUBLE =
endif

ifeq ($(NCEP_WCOSS),1)
    NCEP_WCOSS = -DNCEP_WCOSS
else
    NCEP_WCOSS =
endif

ifeq ($(NWM_META),1)
    NWM_META = -DNWM_META
else
    NWM_META =
endif

# Compiler, flag, and library settings.
RMD             = rm -f
COMPILER90      = ftn
FORMAT_FREE     = -FR
BYTESWAPIO      = -convert big_endian
F90FLAGS        = -dynamic -w -c -ftz -align all -fno-alias -fp-model precise $(FORMAT_FREE) $(BYTESWAPIO)
MODFLAG         = -I./ -I ../../MPP -I ../MPP -I ../mod
#
# Used for DMAPP
# module load dmapp/7.0.1-1.0502.11080.8.76.ari
#LDFLAGS        = $(HDF5_LDFLAGS) -Wl,--whole-archive,-ldmapp,--no-whole-archive
LDFLAGS         = $(HDF5_LDFLAGS_C) -L$(NETCDF_LIB) -lnetcdff -lnetcdf
CPPINVOKE       = -fpp
CPPFLAGS        = -DMPP_LAND -I ../Data_Rec $(HYDRO_D) $(SPATIAL_SOIL) $(NWM_META) $(WRF_HYDRO_NUDGING) $(OUTPUT_CHAN_CONN) $(PRECIP_DOUBLE) $(NCEP_WCOSS)
LIBS            = $(Z_LIB)
NETCDFINC       = $(NETCDF_INC)
NETCDFLIB       = -L$(NETCDF_LIB) -lnetcdff -lnetcdf
NETCDF_INC      = /apps/prod/hpc-stack/intel-19.1.3.304/cray-mpich-8.1.4/netcdf/4.7.4/include
NETCDF_LIB      = /apps/prod/hpc-stack/intel-19.1.3.304/cray-mpich-8.1.4/netcdf/4.7.4/lib

# flags from HRLDAS
COMPILERF90     = $(COMPILER90)
FREESOURCE      =
LIBJASPER       = -ljpeg -L/scholar/kmanning/jasper/lib -ljasper
INCJASPER       = -I/scholar/kmanning/jasper/include
NETCDFMOD       = -I $(NETCDFINC)
BZIP2           = YES
BZIP2_LIB       = -lbz2
BZIP2_INCLUDE   = -I/usr/include
CC              = cc
HYDRO_LIB       = -L../../../lib -lHYDRO
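
# Example (a sketch, not part of the build): if the switches shown in the
# ~/.bashrc snippet at the top of this file are exported before building, e.g.
#   export WRF_HYDRO=1
#   export HYDRO_D=1
# then the conditionals above turn them into defines, and CPPFLAGS expands to
# (switches left unset contribute nothing):
#   -DMPP_LAND -I ../Data_Rec -DHYDRO_D -DWRF_HYDRO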