
Commit

Formatting MPI calls and constants to standard layout
scrasmussen committed Sep 26, 2024
1 parent 7cbd1e4 commit e993ef5
Showing 14 changed files with 423 additions and 423 deletions.
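
The change is stylistic only: Fortran is case-insensitive, so behavior is unchanged. MPI routine names are written in the mixed case used by the MPI standard (for example MPI_Comm_rank rather than MPI_COMM_RANK or mpi_comm_rank), while named constants such as MPI_COMM_WORLD, MPI_SUCCESS, and MPI_INTEGER stay fully upper case. The sketch below illustrates the target layout on the initialization pattern that recurs in module_lis_HYDRO.F and CPL_WRF.F90; the program and variable names are illustrative and not taken from the repository.

```fortran
program mpi_layout_example
   ! Illustrative only: shows the capitalization convention applied in this
   ! commit (mixed-case MPI routine names, upper-case MPI named constants).
   use mpi
   implicit none
   integer :: ierr, my_id, numprocs, hydro_comm
   logical :: already_up

   ! Guard against double initialization, as the coupled drivers do.
   call MPI_Initialized(already_up, ierr)
   if (.not. already_up) then
      call MPI_Init(ierr)
      if (ierr /= MPI_SUCCESS) stop "MPI_Init failed"
   end if

   ! Work on a duplicate of MPI_COMM_WORLD rather than the world communicator.
   call MPI_Comm_dup(MPI_COMM_WORLD, hydro_comm, ierr)
   call MPI_Comm_rank(hydro_comm, my_id, ierr)
   call MPI_Comm_size(hydro_comm, numprocs, ierr)

   write(*,*) "rank", my_id, "of", numprocs

   call MPI_Finalize(ierr)
end program mpi_layout_example
```

Built and run with an MPI Fortran wrapper, something like mpif90 example.f90 -o example && mpirun -np 4 ./example, it prints one line per rank.
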
14 changes: 7 additions & 7 deletions src/CPL/LIS_cpl/module_lis_HYDRO.F
@@ -52,7 +52,7 @@ subroutine lis_cpl_HYDRO(n)
#endif
if(nlst(did)%nsoil < 1) then
write(6,*) "FATAL ERROR: nsoil is less than 1"
-call hydro_stop("In module_lis_HYDRO.F module_lis_HYDRO() - nsoil is less than 1")
+call hydro_stop("In module_lis_HYDRO.F module_lis_HYDRO() - nsoil is less than 1")
endif
allocate(nlst(did)%zsoil8(nlst(did)%nsoil))
nlst(did)%zsoil8(1) = -noah271_struc(n)%lyrthk(1)
@@ -65,15 +65,15 @@ subroutine lis_cpl_HYDRO(n)

#endif

-CALL mpi_initialized( mpi_inited, ierr )
+call MPI_Initialized( mpi_inited, ierr )
if ( .NOT. mpi_inited ) then
-call MPI_INIT( ierr ) ! stand alone land model.
+call MPI_Init( ierr ) ! stand alone land model.
if (ierr /= MPI_SUCCESS) stop "MPI_INIT"
-call MPI_COMM_DUP(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
+call MPI_Comm_dup(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
if (ierr /= MPI_SUCCESS) stop "MPI_COMM_DUP"
endif
-call MPI_COMM_RANK( HYDRO_COMM_WORLD, my_id, ierr )
-call MPI_COMM_SIZE( HYDRO_COMM_WORLD, numprocs, ierr )
+call MPI_Comm_rank( HYDRO_COMM_WORLD, my_id, ierr )
+call MPI_Comm_size( HYDRO_COMM_WORLD, numprocs, ierr )
endif

if(nlst(did)%rtFlag .eq. 0) return
@@ -184,7 +184,7 @@ subroutine lis_cpl_HYDRO(n)
enddo

#ifdef HYDRO_D
-write(6,*) "NDHMS lis date ", LIS_rc%yr, LIS_rc%mo, LIS_rc%da, LIS_rc%hr, LIS_rc%mn, LIS_rc%ss
+write(6,*) "NDHMS lis date ", LIS_rc%yr, LIS_rc%mo, LIS_rc%da, LIS_rc%hr, LIS_rc%mn, LIS_rc%ss
#endif
! write(11,*) "RT_DOMAIN(did)%stc",RT_DOMAIN(did)%stc(:,:,1)
! write(12,*) "noah271_struc(n)%noah%stc(1)",noah271_struc(n)%noah%stc(1)

14 changes: 7 additions & 7 deletions src/CPL/WRF_cpl/module_wrf_HYDRO.F90
@@ -86,7 +86,7 @@ subroutine wrf_cpl_HYDRO(HYDRO_dt,grid,its,ite,jts,jte)


#ifdef MPP_LAND
-call MPI_COMM_DUP(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
+call MPI_Comm_dup(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
call MPP_LAND_INIT(grid%e_we - grid%s_we - 1, grid%e_sn - grid%s_sn - 1)

call mpp_land_bcast_int1 (nlst(did)%nsoil)
@@ -194,9 +194,9 @@ subroutine wrf_cpl_HYDRO(HYDRO_dt,grid,its,ite,jts,jte)
#endif
else
do k = 1, nlst(did)%nsoil
-RT_DOMAIN(did)%STC(:,:,k) = grid%TSLB(its:ite,k,jts:jte)
-RT_DOMAIN(did)%smc(:,:,k) = grid%smois(its:ite,k,jts:jte)
-RT_DOMAIN(did)%sh2ox(:,:,k) = grid%sh2o(its:ite,k,jts:jte)
+RT_DOMAIN(did)%STC(:,:,k) = grid%TSLB(its:ite,k,jts:jte)
+RT_DOMAIN(did)%smc(:,:,k) = grid%smois(its:ite,k,jts:jte)
+RT_DOMAIN(did)%sh2ox(:,:,k) = grid%sh2o(its:ite,k,jts:jte)
end do
rt_domain(did)%infxsrt = grid%infxsrt(its:ite,jts:jte)
rt_domain(did)%soldrain = grid%soldrain(its:ite,jts:jte)
@@ -215,7 +215,7 @@ subroutine wrf_cpl_HYDRO(HYDRO_dt,grid,its,ite,jts,jte)
! update WRF variable after running routing model.
grid%sfcheadrt(its:ite,jts:jte) = rt_domain(did)%overland%control%surface_water_head_lsm

-! provide groundwater soil flux to WRF for fully coupled simulations (FERSCH 09/2014)
+! provide groundwater soil flux to WRF for fully coupled simulations (FERSCH 09/2014)
if(nlst(did)%GWBASESWCRT .eq. 3 ) then
!Wei Yu: comment the following two lines. Not ready for WRF3.7 release
!yw grid%qsgw(its:ite,jts:jte) = gw2d(did)%qsgw
@@ -249,7 +249,7 @@ subroutine wrf2lsm (z1,v1,kk1,z,vout,ix,jx,kk,vegtyp)
do j = 1, jx
do i = 1, ix
do k = 1, kk
-call interpLayer(Z1,v1(i,1:kk1,j),kk1,Z(k),vout(i,j,k))
+call interpLayer(Z1,v1(i,1:kk1,j),kk1,Z(k),vout(i,j,k))
end do
end do
end do
@@ -271,7 +271,7 @@ subroutine lsm2wrf (z1,v1,kk1,z,vout,ix,jx,kk,vegtyp)
do j = 1, jx
do i = 1, ix
do k = 1, kk
-call interpLayer(Z1,v1(i,j,1:kk1),kk1,Z(k),vout(i,k,j))
+call interpLayer(Z1,v1(i,j,1:kk1),kk1,Z(k),vout(i,k,j))
end do
end do
end do

2 changes: 1 addition & 1 deletion src/HYDRO_drv/module_HYDRO_drv.F90
@@ -1821,7 +1821,7 @@ subroutine HYDRO_finish()
close(78)
#endif
call mpp_land_sync()
-call MPI_finalize(ierr)
+call MPI_Finalize(ierr)
stop
#else


2 changes: 1 addition & 1 deletion src/IO/netcdf_layer.F90
@@ -43,7 +43,7 @@ end function create_file_signature
end type NetCDF_serial_

type, extends(NetCDF_layer_) :: NetCDF_parallel_
-integer :: MPI_communicator
+integer :: MPI_Communicator
integer :: default_info = MPI_INFO_NULL
contains
procedure, pass(object) :: create_file => create_file_parallel

48 changes: 24 additions & 24 deletions src/Land_models/NoahMP/IO_code/module_hrldas_netcdf_io.F
@@ -417,9 +417,9 @@ subroutine read_hrldas_hdrinfo(wrfinput_flnm, ix, jx, &
integer :: rank

#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F "// &
-"read_hrldas_hdrinfo() - MPI_COMM_RANK"
+"read_hrldas_hdrinfo() - MPI_Comm_rank"
#else
rank = 0
#endif
@@ -598,9 +598,9 @@ subroutine readland_hrldas(wrfinput_flnm, &
crocus_opt = local_crocus_opt ! setting module scope variable

#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F readland_hrldas()"// &
-" - MPI_COMM_RANK"
+" - MPI_Comm_rank"
#else
rank = 0
#endif
@@ -620,8 +620,8 @@
if (ierr /= 0) then
write(*,'("READLAND_HRLDAS: Problem opening wrfinput file: ''", A, "''")') trim(wrfinput_flnm)
#ifdef _PARALLEL_
-call mpi_finalize(ierr)
-if (ierr /= 0) write(*, '("Problem with MPI_finalize.")')
+call MPI_Finalize(ierr)
+if (ierr /= 0) write(*, '("Problem with MPI_Finalize.")')
#endif
stop "FATAL ERROR: In module_hrldas_netcdf_io.F readland_hrldas()"// &
" - Problem opening wrfinput file."
@@ -740,9 +740,9 @@ subroutine read_mmf_runoff(wrfinput_flnm, &
integer :: rank
#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F read_mmf_runoff()"// &
-" - MPI_COMM_RANK"
+" - MPI_Comm_rank"
#else
rank = 0
#endif
@@ -762,8 +762,8 @@
if (ierr /= 0) then
write(*,'("read_mmf_runoff: Problem opening wrfinput file: ''", A, "''")') trim(wrfinput_flnm)
#ifdef _PARALLEL_
-call mpi_finalize(ierr)
-if (ierr /= 0) write(*, '("Problem with MPI_finalize.")')
+call MPI_Finalize(ierr)
+if (ierr /= 0) write(*, '("Problem with MPI_Finalize.")')
#endif
stop "FATAL ERROR: In module_hrldas_netcdf_io.F read_mmf_runoff()"// &
" - Problem opening wrfinput file."
@@ -1513,9 +1513,9 @@ subroutine readinit_hrldas(netcdf_flnm, xstart, xend, ystart, yend, nsoil, sldpt
#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F"// &
-" readinit_hrldas() - MPI_COMM_RANK"
+" readinit_hrldas() - MPI_Comm_rank"
ierr = nf90_open_par(netcdf_flnm, NF90_NOWRITE, HYDRO_COMM_WORLD, MPI_INFO_NULL, ncid)
#else
@@ -1534,7 +1534,7 @@ subroutine readinit_hrldas(netcdf_flnm, xstart, xend, ystart, yend, nsoil, sldpt
#endif
endif
#ifdef _PARALLEL_
-call mpi_finalize(ierr)
+call MPI_Finalize(ierr)
#endif
stop "FATAL ERROR: In module_hrldas_netcdf_io.F readinit_hrldas()"// &
" - Problem opening netcdf file."
@@ -1658,9 +1658,9 @@ subroutine init_interp(xstart, xend, ystart, yend, nsoil, sldpth, var, nvar, src
integer :: rank
#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F init_interp()"// &
-" - MPI_COMM_RANK."
+" - MPI_Comm_rank."
#else
rank = 0
#endif
@@ -1964,15 +1964,15 @@ subroutine READFORC_HRLDAS(flnm_template, forcing_timestep, target_date, xstart,
#endif
if (ierr /= 0) then
#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F"// &
-" READFORC_HRLDAS() - MPI_COMM_RANK"
+" READFORC_HRLDAS() - MPI_Comm_rank"
if (rank == 0) then
#endif
write(*,'("A) Problem opening netcdf file: ''", A, "''")') trim(flnm)
#ifdef _PARALLEL_
endif
-call mpi_finalize(ierr)
+call MPI_Finalize(ierr)
#endif
stop "FATAL ERROR: In module_hrldas_netcdf_io.F READFORC_HRLDAS()"// &
" - Problem opening netcdf file"
@@ -3099,9 +3099,9 @@ subroutine prepare_restart_file_seq(outdir, version, igrid, llanduse, olddate, s

#ifdef _PARALLEL_

-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F"// &
-" prepare_restart_file_seq() - MPI_COMM_RANK problem"
+" prepare_restart_file_seq() - MPI_Comm_rank problem"

#else

@@ -3433,9 +3433,9 @@ subroutine read_restart(restart_flnm, &
restart_filename_remember = restart_flnm

#ifdef _PARALLEL_
-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F "// &
-"read_restart() - MPI_COMM_RANK"
+"read_restart() - MPI_Comm_rank"

ierr = nf90_open_par(trim(restart_flnm), NF90_NOWRITE, HYDRO_COMM_WORLD, MPI_INFO_NULL, ncid)
#else
@@ -3615,9 +3615,9 @@ subroutine get_from_restart_2d_float(parallel_xstart, parallel_xend, subwindow_x

#ifdef _PARALLEL_

-call MPI_COMM_RANK(HYDRO_COMM_WORLD, rank, ierr)
+call MPI_Comm_rank(HYDRO_COMM_WORLD, rank, ierr)
if (ierr /= MPI_SUCCESS) stop "FATAL ERROR: In module_hrldas_netcdf_io.F "// &
-"get_from_restart_2d_float() - MPI_COMM_RANK"
+"get_from_restart_2d_float() - MPI_Comm_rank"

ierr = nf90_open_par(trim(restart_filename_remember), NF90_NOWRITE, HYDRO_COMM_WORLD, MPI_INFO_NULL, ncid)


@@ -27,8 +27,8 @@ END SUBROUTINE wrf_error_fatal
SUBROUTINE wrf_abort
use module_cpl_land
integer ierr
-CALL MPI_ABORT(HYDRO_COMM_WORLD,1,ierr)
-call MPI_finalize(ierr)
+call MPI_Abort(HYDRO_COMM_WORLD,1,ierr)
+call MPI_Finalize(ierr)
STOP 'wrf_abort'
END SUBROUTINE wrf_abort


28 changes: 14 additions & 14 deletions src/MPP/CPL_WRF.F90
@@ -47,17 +47,17 @@ subroutine CPL_LAND_INIT(istart,iend,jstart,jend)
data cyclic/.false.,.false./ ! not cyclic
data reorder/.false./

-CALL mpi_initialized( mpi_inited, ierr )
+call MPI_Initialized( mpi_inited, ierr )
if ( .NOT. mpi_inited ) then
-call mpi_init(ierr)
-if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_INIT failed")
-call MPI_COMM_DUP(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
-if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_COMM_DUP failed")
+call MPI_Init(ierr)
+if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_Init failed")
+call MPI_Comm_dup(MPI_COMM_WORLD, HYDRO_COMM_WORLD, ierr)
+if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_Comm_dup failed")
endif

-call MPI_COMM_RANK( HYDRO_COMM_WORLD, my_global_id, ierr )
-call MPI_COMM_SIZE( HYDRO_COMM_WORLD, total_pe_num, ierr )
-if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_COMM_RANK and/or MPI_COMM_SIZE failed")
+call MPI_Comm_rank( HYDRO_COMM_WORLD, my_global_id, ierr )
+call MPI_Comm_size( HYDRO_COMM_WORLD, total_pe_num, ierr )
+if (ierr /= MPI_SUCCESS) call fatal_error_stop("MPI Error: MPI_Comm_rank and/or MPI_Comm_size failed")

allocate(node_info(9,total_pe_num))

@@ -98,7 +98,7 @@ subroutine CPL_LAND_INIT(istart,iend,jstart,jend)
call MPI_Cart_create(HYDRO_COMM_WORLD, ndim, dims, &
cyclic, reorder, cartGridComm, ierr)

-call MPI_CART_GET(cartGridComm, 2, dims, cyclic, coords, ierr)
+call MPI_Cart_get(cartGridComm, 2, dims, cyclic, coords, ierr)

p_up_down = coords(0)
p_left_right = coords(1)
@@ -116,21 +116,21 @@ subroutine send_info()

if(my_global_id .eq. 0) then
do i = 1, total_pe_num-1
-call mpi_recv(node_info(:,i+1),size,MPI_INTEGER, &
+call MPI_Recv(node_info(:,i+1),size,MPI_INTEGER, &
i,tag,HYDRO_COMM_WORLD,mpp_status,ierr)
enddo
else
-call mpi_send(node_info(:,my_global_id+1),size, &
+call MPI_Send(node_info(:,my_global_id+1),size, &
MPI_INTEGER,0,tag,HYDRO_COMM_WORLD,ierr)
endif

-call MPI_barrier( HYDRO_COMM_WORLD ,ierr)
+call MPI_Barrier( HYDRO_COMM_WORLD ,ierr)

size = 9 * total_pe_num
-call mpi_bcast(node_info,size,MPI_INTEGER, &
+call MPI_Bcast(node_info,size,MPI_INTEGER, &
0,HYDRO_COMM_WORLD,ierr)

-call MPI_barrier( HYDRO_COMM_WORLD ,ierr)
+call MPI_Barrier( HYDRO_COMM_WORLD ,ierr)

end subroutine send_info


20 changes: 10 additions & 10 deletions src/MPP/module_mpp_GWBUCKET.F90
@@ -37,15 +37,15 @@ subroutine collectSizeInd(numbasns)

if(my_id .ne. IO_id) then
tag = 66
-call mpi_send(numbasns,1,MPI_INTEGER, IO_id, &
+call MPI_Send(numbasns,1,MPI_INTEGER, IO_id, &
tag,HYDRO_COMM_WORLD,ierr)
else
do i = 0, numprocs - 1
if(i .eq. IO_id) then
sizeInd(i+1) = numbasns
else
tag = 66
-call mpi_recv(rcv,1,&
+call MPI_Recv(rcv,1,&
MPI_INTEGER,i,tag,HYDRO_COMM_WORLD,mpp_status,ierr)

sizeInd(i+1) = rcv
@@ -81,10 +81,10 @@ subroutine gw_write_io_real(numbasns,inV,ind,outV)
if(my_id .ne. IO_id) then
if(numbasns .gt. 0) then
tag = 62
-call mpi_send(inV,numbasns,MPI_REAL, IO_id, &
+call MPI_Send(inV,numbasns,MPI_REAL, IO_id, &
tag,HYDRO_COMM_WORLD,ierr)
tag2 = 63
-call mpi_send(ind,numbasns,MPI_INTEGER8, IO_id, &
+call MPI_Send(ind,numbasns,MPI_INTEGER8, IO_id, &
tag2,HYDRO_COMM_WORLD,ierr)
endif
else
@@ -97,10 +97,10 @@ subroutine gw_write_io_real(numbasns,inV,ind,outV)
if(i .ne. IO_id) then
if(sizeInd(i+1) .gt. 0) then
tag = 62
-call mpi_recv(vbuff(1:sizeInd(i+1)),sizeInd(i+1),&
+call MPI_Recv(vbuff(1:sizeInd(i+1)),sizeInd(i+1),&
MPI_REAL,i,tag,HYDRO_COMM_WORLD,mpp_status,ierr)
tag2 = 63
-call mpi_recv(ibuff(1:sizeInd(i+1)),sizeInd(i+1),&
+call MPI_Recv(ibuff(1:sizeInd(i+1)),sizeInd(i+1),&
MPI_INTEGER8,i,tag2,HYDRO_COMM_WORLD,mpp_status,ierr)
do k = 1, sizeInd(i+1)
outV(ibuff(k)) = vbuff(k)
@@ -139,10 +139,10 @@ subroutine gw_write_io_int(numbasns,inV,ind,outV)
if(my_id .ne. IO_id) then
if(numbasns .gt. 0) then
tag = 62
-call mpi_send(inV,numbasns,MPI_INTEGER8, IO_id, &
+call MPI_Send(inV,numbasns,MPI_INTEGER8, IO_id, &
tag,HYDRO_COMM_WORLD,ierr)
tag2 = 63
-call mpi_send(ind,numbasns,MPI_INTEGER8, IO_id, &
+call MPI_Send(ind,numbasns,MPI_INTEGER8, IO_id, &
tag2,HYDRO_COMM_WORLD,ierr)
endif
else
@@ -155,10 +155,10 @@ subroutine gw_write_io_int(numbasns,inV,ind,outV)
if(i .ne. IO_id) then
if(sizeInd(i+1) .gt. 0) then
tag = 62
-call mpi_recv(vbuff(1:sizeInd(i+1)),sizeInd(i+1),&
+call MPI_Recv(vbuff(1:sizeInd(i+1)),sizeInd(i+1),&
MPI_INTEGER8,i,tag,HYDRO_COMM_WORLD,mpp_status,ierr)
tag2 = 63
-call mpi_recv(ibuff(1:sizeInd(i+1)),sizeInd(i+1),&
+call MPI_Recv(ibuff(1:sizeInd(i+1)),sizeInd(i+1),&
MPI_INTEGER8,i,tag2,HYDRO_COMM_WORLD,mpp_status,ierr)
do k = 1, sizeInd(i+1)
outV(ibuff(k)) = vbuff(k)