@edbennett · Created April 19, 2018 09:21
Test program for halo exchange using mpi_f08 that gives unexpected warnings
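The process-grid dimensions are injected at compile time, so the source must be preprocessed with NP_X and NP_Y defined and run on exactly NP_X * NP_Y processes. A plausible build-and-run line, assuming an mpifort wrapper around gfortran and a source file named test_halo.f90 (both assumptions; the 2x2 grid is illustrative):

mpifort -cpp -DNP_X=2 -DNP_Y=2 -o test_halo test_halo.f90
mpirun -np 4 ./test_halo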
module params
  implicit none
  save

#define KSIZE 12
  integer, parameter :: ksize = KSIZE
#if !(defined(NP_X) && defined(NP_Y))
#error "NP_X and NP_Y must be defined for MPI compilation."
#endif
! These checks run in the preprocessor, so they must use the macro KSIZE,
! not the Fortran parameter ksize
#if (KSIZE / NP_X) * NP_X != KSIZE
#error "ksize must be divisible by NP_X"
#elif (KSIZE / NP_Y) * NP_Y != KSIZE
#error "ksize must be divisible by NP_Y"
#endif
  ! Process grid and local (per-process) lattice extents
  integer, parameter :: np_x = NP_X, np_y = NP_Y
  integer, parameter :: ksizex_l = ksize / np_x
  integer, parameter :: ksizey_l = ksize / np_y
end module params
module data
  use params
  implicit none
  save

  ! Local arrays with a one-site halo on every edge
  real :: test_array(0:ksizex_l+1, 0:ksizey_l+1)
  real :: test_array_2(0:ksizex_l+1, 0:ksizey_l+1)
end module data
! Map Cartesian process coordinates to a linear process id
function pid(ip_x, ip_y) result(id)
  use params
  implicit none
  integer, intent(in) :: ip_x, ip_y
  integer :: id

  id = ip_x + ip_y * np_x
end function pid
module comms
  use params
  use mpi_f08
  implicit none
  save

  ! This process's grid coordinates, global rank, and total process count
  integer :: ip_x, ip_y, ip_global, np_global
  type(MPI_Comm) :: comm

contains
  ! Post nonblocking sends/receives to update the one-site halo of Array.
  ! tag selects a distinct block of message tags, so that concurrent
  ! updates of different arrays cannot match each other's messages.
  subroutine start_halo_update(Array, tag, reqs)
    integer, intent(in) :: tag
    real, intent(inout) :: Array(0:ksizex_l+1, 0:ksizey_l+1)
    type(MPI_Request), intent(out) :: reqs(8)
    integer :: ip_xup, ip_xdn, ip_yup, ip_ydn
    integer :: tag_offset

    tag_offset = 4 * tag

    ! Start send and receive in x direction. These array sections are
    ! non-contiguous (the first index is fixed), so the compiler may pass
    ! temporary copies to the nonblocking calls; this is a plausible
    ! source of the warnings this gist is about.
    call MPI_Cart_shift(comm, 0, 1, ip_xdn, ip_xup)
    call MPI_Isend(Array(ksizex_l, 1:ksizey_l), ksizey_l, MPI_Real, ip_xup, 0 + tag_offset, comm, reqs(1))
    call MPI_Irecv(Array(0, 1:ksizey_l), ksizey_l, MPI_Real, ip_xdn, 0 + tag_offset, comm, reqs(2))
    call MPI_Isend(Array(1, 1:ksizey_l), ksizey_l, MPI_Real, ip_xdn, 1 + tag_offset, comm, reqs(3))
    call MPI_Irecv(Array(ksizex_l+1, 1:ksizey_l), ksizey_l, MPI_Real, ip_xup, 1 + tag_offset, comm, reqs(4))

    ! Start send and receive in y direction (these sections are contiguous)
    call MPI_Cart_shift(comm, 1, 1, ip_ydn, ip_yup)
    call MPI_Isend(Array(1:ksizex_l, ksizey_l), ksizex_l, MPI_Real, ip_yup, 2 + tag_offset, comm, reqs(5))
    call MPI_Irecv(Array(1:ksizex_l, 0), ksizex_l, MPI_Real, ip_ydn, 2 + tag_offset, comm, reqs(6))
    call MPI_Isend(Array(1:ksizex_l, 1), ksizex_l, MPI_Real, ip_ydn, 3 + tag_offset, comm, reqs(7))
    call MPI_Irecv(Array(1:ksizex_l, ksizey_l+1), ksizex_l, MPI_Real, ip_yup, 3 + tag_offset, comm, reqs(8))
  end subroutine start_halo_update
  ! Initialise MPI and build the 2d periodic Cartesian communicator
  subroutine init_MPI()
    integer :: coords(2)

    call MPI_Init
    ! Check that we have the right number of processes
    call MPI_Comm_size(MPI_COMM_WORLD, np_global)
    call MPI_Comm_rank(MPI_COMM_WORLD, ip_global)
    if (np_global .ne. NP_X * NP_Y) then
      print *, "MPI dimensionality mismatch: ", NP_X, "*", NP_Y, "!=", np_global
      call MPI_Finalize
      stop 2
    end if

    ! Set up a Cartesian communicator; periodic boundaries, allow reordering
    call MPI_Cart_create(MPI_COMM_WORLD, 2, (/ NP_X, NP_Y /), &
         & (/ .true., .true. /), .true., comm)

    ! Know where I am; since reordering is allowed, the rank in comm may
    ! differ from the rank in MPI_COMM_WORLD, so query it again
    call MPI_Comm_rank(comm, ip_global)
    call MPI_Cart_coords(comm, ip_global, 2, coords)
    ip_x = coords(1)
    ip_y = coords(2)
  end subroutine init_MPI
  ! Wait for all eight transfers started by start_halo_update to complete
  subroutine complete_halo_update(reqs)
    type(MPI_Request), intent(inout) :: reqs(8)

    call MPI_Waitall(8, reqs, MPI_STATUSES_IGNORE)
  end subroutine complete_halo_update
end module comms
program test_halo_4_real
  use params
  use data
  use comms
  implicit none

  integer :: ix, iy, i = 0
  integer :: pid  ! external function defined above
  type(MPI_Request), dimension(8) :: reqs, reqs2

  call init_MPI

  ! Sentinel values in the halo regions; a correct update overwrites them
  test_array(0, :) = -1.0
  test_array(:, 0) = -2.0
  test_array(ksizex_l+1, :) = -1.0
  test_array(:, ksizey_l+1) = -2.0

  ! Set up local arrays: the same 1..ksizex_l*ksizey_l pattern on every
  ! process in test_array, and the owning process id in test_array_2
  do iy = 1, ksizey_l
    do ix = 1, ksizex_l
      i = i + 1
      test_array(ix, iy) = i
      test_array_2(ix, iy) = pid(ip_x, ip_y)
    end do
  end do

  ! Communicate both arrays concurrently, using disjoint tag blocks
  call start_halo_update(test_array, 0, reqs)
  call start_halo_update(test_array_2, 1, reqs2)
  call complete_halo_update(reqs)
  call complete_halo_update(reqs2)

  ! Check output
  if (test_array(1,1) .ne. test_array(ksizex_l+1,1) .or. &
      nint(test_array_2(ksizex_l+1,1)) &
      .ne. pid(modulo(ip_x+1, np_x), ip_y)) then
    print *, "Negative x update failed on process", ip_x, ip_y
  end if
  if (test_array(1,1) .ne. test_array(1,ksizey_l+1) .or. &
      nint(test_array_2(1,ksizey_l+1)) &
      .ne. pid(ip_x, modulo(ip_y+1, np_y))) then
    print *, "Negative y update failed on process", ip_x, ip_y
  end if
  if (test_array(ksizex_l,1) .ne. test_array(0,1) .or. &
      nint(test_array_2(0,1)) &
      .ne. pid(modulo(ip_x-1, np_x), ip_y)) then
    print *, "Positive x update failed on process", ip_x, ip_y
  end if
  if (test_array(1,ksizey_l) .ne. test_array(1,0) .or. &
      nint(test_array_2(1,0)) &
      .ne. pid(ip_x, modulo(ip_y-1, np_y))) then
    print *, "Positive y update failed on process", ip_x, ip_y
  end if

  call MPI_Finalize
end program test_halo_4_real
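On success the program prints nothing; each failed check reports the direction of the broken update and the grid coordinates of the affected process. As noted in the comments in start_halo_update, the x-direction transfers hand non-contiguous array sections to nonblocking calls, so the compiler may create temporary copies whose lifetime ends before the communication does. Below is a minimal sketch (not part of the original gist) of one standard alternative for such a transfer, describing the stride with an MPI derived datatype so that only a single array element is passed; the subroutine name and its argument list are illustrative only.

! Sketch: send the upper x edge of Array without a temporary copy by
! describing its memory layout with MPI_Type_vector
subroutine isend_x_edge(Array, cart_comm, dest, tag, req)
  use params
  use mpi_f08
  implicit none
  real, intent(in) :: Array(0:ksizex_l+1, 0:ksizey_l+1)
  type(MPI_Comm), intent(in) :: cart_comm
  integer, intent(in) :: dest, tag
  type(MPI_Request), intent(out) :: req
  type(MPI_Datatype) :: x_edge

  ! ksizey_l blocks of one real each, separated in memory by the
  ! leading dimension of Array (ksizex_l + 2)
  call MPI_Type_vector(ksizey_l, 1, ksizex_l + 2, MPI_Real, x_edge)
  call MPI_Type_commit(x_edge)
  call MPI_Isend(Array(ksizex_l, 1), 1, x_edge, dest, tag, cart_comm, req)
  ! Freeing immediately is legal; MPI keeps the datatype alive until
  ! the pending send completes
  call MPI_Type_free(x_edge)
end subroutine isend_x_edge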