Commit fabb1c42 authored by Lorenz Huedepohl, committed by Andreas Marek

Loop over all possible domain decompositions

We got reports from a user that there was trouble with certain domain
decompositions. So far, the tests have only looked at (approximately)
square decompositions in column-major process order.

Now, a new class of tests loops over all possible decompositions
(row * col) for a given total number of tasks.

So far, we cannot confirm that there are any problems; all
possibilities work as expected.
parent 939020cc
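
A minimal, self-contained sketch of the new test strategy (the program and
variable names here are illustrative, not taken from the commit): instead of
picking only the near-square factorization, every divisor of the task count
is visited.

    ! Sketch: enumerate every factorization nprocs = np_rows * np_cols.
    program enumerate_decompositions
      implicit none
      integer :: nprocs, np_rows, np_cols

      nprocs = 12   ! stand-in for the actual number of MPI tasks
      do np_cols = 1, nprocs
        if (mod(nprocs, np_cols) /= 0) cycle   ! skip non-divisors
        np_rows = nprocs / np_cols
        print '(3(a,i0))', 'grid: ', np_rows, ' x ', np_cols, ' = ', nprocs
      end do
    end program

For nprocs = 12 this visits 12x1, 6x2, 4x3, 3x4, 2x6 and 1x12, which is
exactly the set that the new factor loop in the diff below walks through.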
@@ -145,8 +145,13 @@ program test
   class(elpa_t), pointer :: e
 #ifdef TEST_ALL_KERNELS
   integer :: i
+#endif
+#ifdef TEST_ALL_LAYOUTS
+  character(len=1), parameter :: layouts(2) = [ 'C', 'R' ]
+  integer :: i_layout
 #endif
   integer :: kernel
+  character(len=1) :: layout

 #if defined(TEST_COMPLEX) && defined(__SOLVE_TRIDIAGONAL)
 #ifdef WITH_MPI
@@ -158,11 +163,27 @@ program test
   call setup_mpi(myid, nprocs)

+  if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then
+    print *, "ELPA API version not supported"
+    stop 1
+  endif
+
+#ifdef TEST_ALL_LAYOUTS
+  do i_layout = 1, size(layouts)      ! layout loop
+    layout = layouts(i_layout)
+    do np_cols = 1, nprocs            ! factor loop
+      if (mod(nprocs,np_cols) /= 0 ) then
+        cycle
+      endif
+#else
+  layout = 'C'
   do np_cols = NINT(SQRT(REAL(nprocs))),2,-1
     if(mod(nprocs,np_cols) == 0 ) exit
   enddo
+#endif
   np_rows = nprocs/np_cols
+  assert(nprocs == np_rows * np_cols)

   if (myid == 0) then
     print '((a,i0))', 'Matrix size: ', na
@@ -170,11 +191,12 @@ program test
     print '((a,i0))', 'Blocksize: ', nblk
     print '((a,i0))', 'Num MPI proc: ', nprocs
     print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs
+    print '(a)', 'Process layout: ' // layout
     print *,''
   endif

-  call set_up_blacsgrid(mpi_comm_world, my_blacs_ctxt, np_rows, np_cols, &
-                        nprow, npcol, my_prow, my_pcol)
+  call set_up_blacsgrid(mpi_comm_world, np_rows, np_cols, layout, &
+                        my_blacs_ctxt, my_prow, my_pcol)

   call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, &
                                na_rows, na_cols, sc_desc, my_blacs_ctxt, info)
@@ -218,11 +240,6 @@ program test
                     np_cols, my_prow, my_pcol)
 #endif

-  if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then
-    print *, "ELPA API version not supported"
-    stop 1
-  endif
-
   e => elpa_allocate()

   call e%set("na", na, error)
@@ -382,7 +399,6 @@ program test
 #endif

   call elpa_deallocate(e)
-  call elpa_uninit()

   deallocate(a)
 #ifdef TEST_MATRIX_RANDOM
@@ -397,6 +413,13 @@ program test
   deallocate(ev_analytic)
 #endif

+#ifdef TEST_ALL_LAYOUTS
+  end do ! factors
+  end do ! layouts
+#endif
+
+  call elpa_uninit()
+
 #ifdef WITH_MPI
   call blacs_gridexit(my_blacs_ctxt)
   call mpi_finalize(mpierr)
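
A note on the layout character introduced above: BLACS_Gridinit orders the
processes of a np_rows x np_cols grid either column-major ('C', the only case
tested before this commit) or row-major ('R'). A small self-contained sketch
of the resulting rank-to-coordinate mapping (plain integer arithmetic, no
BLACS needed; all names are illustrative):

    ! How 'C' (column-major) and 'R' (row-major) process ordering map
    ! MPI ranks onto a np_rows x np_cols grid.
    program layout_mapping
      implicit none
      integer, parameter :: np_rows = 2, np_cols = 3
      integer :: rank

      do rank = 0, np_rows*np_cols - 1
        ! 'C': consecutive ranks fill a column before moving right
        print '(a,i0,2(a,i0),a)', "'C': rank ", rank, &
              ' -> (', mod(rank, np_rows), ',', rank / np_rows, ')'
        ! 'R': consecutive ranks fill a row before moving down
        print '(a,i0,2(a,i0),a)', "'R': rank ", rank, &
              ' -> (', rank / np_cols, ',', mod(rank, np_cols), ')'
      end do
    end program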
@@ -45,44 +45,45 @@ module test_blacs_infrastructure
   contains

-    subroutine set_up_blacsgrid(mpi_comm_parent, my_blacs_ctxt, np_rows, &
-                                np_cols, nprow, npcol, my_prow, my_pcol)
+    !c> void set_up_blacsgrid_f(int mpi_comm_parent, int np_rows, int np_cols, char layout,
+    !c>                         int* my_blacs_ctxt, int *my_prow, int *my_pcol);
+    subroutine set_up_blacsgrid(mpi_comm_parent, np_rows, np_cols, layout, &
+                                my_blacs_ctxt, my_prow, my_pcol) bind(C, name="set_up_blacsgrid_f")

       use test_util
       implicit none
-      integer(kind=ik), intent(in)    :: mpi_comm_parent
-      integer(kind=ik), intent(inout) :: my_blacs_ctxt, np_rows, &
-                                         np_cols, nprow, npcol, my_prow, my_pcol
+      integer(kind=c_int), intent(in), value :: mpi_comm_parent, np_rows, np_cols
+      character(len=1), intent(in), value    :: layout
+      integer(kind=c_int), intent(out)       :: my_blacs_ctxt, my_prow, my_pcol
+
+#ifdef WITH_MPI
+      integer :: np_rows_, np_cols_
+#endif
+
+      if (layout /= 'R' .and. layout /= 'C') then
+        print *, "layout must be 'R' or 'C'"
+        stop 1
+      end if

       my_blacs_ctxt = mpi_comm_parent
 #ifdef WITH_MPI
-      call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols)
-      call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol)
+      call BLACS_Gridinit(my_blacs_ctxt, layout, np_rows, np_cols)
+      call BLACS_Gridinfo(my_blacs_ctxt, np_rows_, np_cols_, my_prow, my_pcol)
+      if (np_rows /= np_rows_) then
+        print *, "BLACS_Gridinfo returned different values for np_rows as set by BLACS_Gridinit"
+        stop 1
+      endif
+      if (np_cols /= np_cols_) then
+        print *, "BLACS_Gridinfo returned different values for np_cols as set by BLACS_Gridinit"
+        stop 1
+      endif
 #else
-      np_rows = 1
-      np_cols = 1
       my_prow = 0
       my_pcol = 0
 #endif
     end subroutine

-    !c> void set_up_blacsgrid_f(int mpi_comm_parent, int* my_blacs_ctxt,
-    !c>                         int *np_rows, int *np_cols, int *nprow, int *npcol,
-    !c>                         int *my_prow, int *my_pcol);
-    subroutine set_up_blacsgrid_f(mpi_comm_parent, my_blacs_ctxt, np_rows, &
-                                  np_cols, nprow, npcol, my_prow, my_pcol) &
-                                  bind(C, name="set_up_blacsgrid_f")
-      use iso_c_binding
-      implicit none
-      integer(kind=c_int), value :: mpi_comm_parent
-      integer(kind=c_int)        :: my_blacs_ctxt, np_rows, &
-                                    np_cols, nprow, npcol, my_prow, my_pcol
-
-      call set_up_blacsgrid(mpi_comm_parent, my_blacs_ctxt, np_rows, &
-                            np_cols, nprow, npcol, my_prow, my_pcol)
-    end subroutine

     subroutine set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, &
                                        np_rows, np_cols, na_rows, &
                                        na_cols, sc_desc, my_blacs_ctxt, info)
@@ -91,9 +92,10 @@ module test_blacs_infrastructure
       use test_util
       implicit none
-      integer(kind=ik), intent(inout) :: na, nblk, my_prow, my_pcol, np_rows, &
-                                         np_cols, na_rows, na_cols, sc_desc(1:9), &
-                                         my_blacs_ctxt, info
+      integer(kind=ik), intent(in)    :: na, nblk, my_prow, my_pcol, np_rows, &
+                                         np_cols, &
+                                         my_blacs_ctxt, info
+      integer(kind=ik), intent(out)   :: na_rows, na_cols, sc_desc(1:9)
 #ifdef WITH_MPI
       integer(kind=ik), external :: numroc
       integer(kind=ik)           :: mpierr
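
The structural change in this file: the former pair of routines (a
Fortran-facing set_up_blacsgrid plus a thin set_up_blacsgrid_f wrapper for C)
collapses into one routine with bind(C) and value dummy arguments, callable
from both languages. A minimal self-contained sketch of that pattern
(grid_demo and pick_grid are hypothetical names, not part of the commit):

    module grid_demo
      use iso_c_binding
      implicit none
    contains
      ! One routine, callable from Fortran and, as "pick_grid_f", from C.
      subroutine pick_grid(nprocs, np_rows, np_cols) bind(C, name="pick_grid_f")
        integer(kind=c_int), intent(in), value :: nprocs
        integer(kind=c_int), intent(out)       :: np_rows, np_cols
        ! pick the largest divisor of nprocs that is <= sqrt(nprocs)
        do np_cols = nint(sqrt(real(nprocs))), 1, -1
          if (mod(nprocs, np_cols) == 0) exit
        end do
        np_rows = nprocs / np_cols
      end subroutine
    end module

    program demo
      use grid_demo
      implicit none
      integer(kind=c_int) :: r, c
      call pick_grid(12_c_int, r, c)
      print '(2(a,i0))', 'rows=', r, ' cols=', c   ! prints rows=4 cols=3
    end program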