Commit 009323b9 authored by Volker Blum

VB: Added an option to read in controlling parameters for the test*.f90 programs at runtime.

    If no input file is supplied when running any of the test* programs, the
    programs run with compiled-in default values as before.

    Optionally, a file 'test_parameters.in' can be supplied. In that case,
    each test* program checks it for valid replacement parameters in the
    following format:

    na 7000
    nev 3000
    nblk 32

    where:
    - the order of lines does not matter
    - any, all, or none of the specified lines need be present
    - lines starting with '#' are treated as comments
    - na is the matrix dimension for which the test is run
    - nev is the number of desired eigenvalue/eigenvector pairs
    - nblk is the algorithmic block size, usually a relatively small number
      such as 16, 32, or 64. Unreasonable values should be avoided; please
      check first.
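
    For example, a complete 'test_parameters.in' with a comment line could
    read:

    # example input - override na and nev, keep the default nblk
    na 7000
    nev 3000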
parent 7decfbb3
@@ -130,6 +130,32 @@ other piece of code.
- All files starting with test_... are for demonstrating the use
of the elpa library (but not needed for using it).
- The test_* programs build their own random matrices, solve the eigenvalue
problem and write timings.
There are three parameters that control the test_* programs:
- na = matrix dimension
- nev = number of eigenvalue / eigenvector pairs that are actually needed
- nblk = algorithmic block size, usually 16, 32 or 64
(important - do not set to unreasonable values)
These input parameters are set to the default values na=4000, nev=1500, nblk=16
in the header of each source file test*.f90.
Optionally, they can be controlled at runtime by supplying an input file
called 'test_parameters.in'.
This file may contain any subset of these parameter lines, or none at all
(input values not specified in 'test_parameters.in' are set to their default
values). Lines starting with '#' are treated as comments.
The format of test_parameters.in is simple, for instance:
na 8000
nev 6000
nblk 32
to change all values. The order of lines does not matter.
- All test programs solve an eigenvalue problem and check the correctness
of the result by evaluating || A*x - x*lambda || and checking the
orthogonality of the eigenvectors (a minimal sketch of this check follows
below).
......
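
The correctness check described in the README section above can be sketched
as follows (a minimal serial illustration: the actual test programs operate
on block-cyclically distributed matrices, and the program name and the
trivial identity test data used here are assumptions of this sketch):

! Minimal serial sketch of evaluating || A*x - x*lambda ||.
! Illustration only - the actual test programs do this in parallel.
program check_residual_sketch
  implicit none
  integer, parameter :: na = 4, nev = 4
  real*8 :: a(na,na), z(na,nev), ev(nev), tmp(na,nev)
  integer :: i
  ! trivial test data: A = identity, eigenvectors = unit vectors, eigenvalues = 1
  a = 0.d0; z = 0.d0; ev = 1.d0
  do i = 1, na
    a(i,i) = 1.d0
    z(i,i) = 1.d0
  end do
  ! tmp = A*z (BLAS dgemm; link against a BLAS such as MKL, cf. the Makefile below)
  call dgemm('N','N',na,nev,na,1.d0,a,na,z,na,0.d0,tmp,na)
  ! tmp = A*z - z*lambda, column by column
  do i = 1, nev
    tmp(:,i) = tmp(:,i) - ev(i)*z(:,i)
  end do
  write(6,*) 'Max residual of A*x - x*lambda:', maxval(abs(tmp))
end program check_residual_sketch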
@@ -36,29 +36,29 @@ LIBS = -L/opt/intel/Compiler/11.0/069/mkl/lib/em64t -lmkl_lapack -lmkl -lguide -
all: test_real read_real test_complex test_real_gen read_real_gen test_complex_gen test_real2 test_complex2
test_real: test_real.o elpa1.o
$(F90) -o $@ test_real.o elpa1.o $(LIBS)
test_real: test_real.o read_test_parameters.o elpa1.o
$(F90) -o $@ test_real.o read_test_parameters.o elpa1.o $(LIBS)
read_real: read_real.o elpa1.o
$(F90) -o $@ read_real.o elpa1.o $(LIBS)
test_complex: test_complex.o elpa1.o
$(F90) -o $@ test_complex.o elpa1.o $(LIBS)
test_complex: test_complex.o read_test_parameters.o elpa1.o
$(F90) -o $@ test_complex.o read_test_parameters.o elpa1.o $(LIBS)
test_real_gen: test_real_gen.o elpa1.o
$(F90) -o $@ test_real_gen.o elpa1.o $(LIBS)
test_real_gen: test_real_gen.o read_test_parameters.o elpa1.o
$(F90) -o $@ test_real_gen.o read_test_parameters.o elpa1.o $(LIBS)
read_real_gen: read_real_gen.o elpa1.o
$(F90) -o $@ read_real_gen.o elpa1.o $(LIBS)
test_complex_gen: test_complex_gen.o elpa1.o
$(F90) -o $@ test_complex_gen.o elpa1.o $(LIBS)
test_complex_gen: test_complex_gen.o read_test_parameters.o elpa1.o
$(F90) -o $@ test_complex_gen.o read_test_parameters.o elpa1.o $(LIBS)
test_real2: test_real2.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o
$(F90) -o $@ test_real2.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o $(LIBS)
test_real2: test_real2.o elpa1.o elpa2.o read_test_parameters.o elpa2_kernels.o blockedQR.o
$(F90) -o $@ test_real2.o read_test_parameters.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o $(LIBS)
test_complex2: test_complex2.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o
$(F90) -o $@ test_complex2.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o $(LIBS)
test_complex2: test_complex2.o read_test_parameters.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o
$(F90) -o $@ test_complex2.o read_test_parameters.o elpa1.o elpa2.o elpa2_kernels.o blockedQR.o $(LIBS)
test_real.o: test_real.f90 elpa1.o
$(F90) -c $<
@@ -84,6 +84,9 @@ test_real2.o: test_real2.f90 elpa1.o elpa2.o
test_complex2.o: test_complex2.f90 elpa1.o elpa2.o
$(F90) -c $<
read_test_parameters.o: read_test_parameters.f90
$(F90) -c $<
elpa1.o: ../src/elpa1.f90
$(F90) -c $<
@@ -97,4 +100,4 @@ elpa2_kernels.o: ../src/elpa2_kernels.f90
$(F90OPT) -c ../src/elpa2_kernels.f90
clean:
rm -f *.o *.mod test_real test_complex test_real_gen test_complex_gen test_real2 test_complex2 read_real read_real_gen
rm -f *.o *.mod test_real test_complex test_real_gen test_complex_gen test_real2 test_complex2 read_real read_real_gen read_test_parameters.o
subroutine read_test_parameters (na, nev, nblk, myid, mpi_comm)
!-------------------------------------------------------------------------------
! Subroutine read_test_parameters allows setting the matrix parameters for the
! ELPA test programs at runtime. It is used by all the test programs which
! set up their test matrices as random matrices:
!
! - test_complex2.f90
! - test_complex.f90
! - test_complex_gen.f90
! - test_real2.f90
! - test_real.f90
! - test_real_gen.f90
!
! If an input file "test_parameters.in" is found, we scan it for any of the parameters:
!
! na : System size (matrix size)
! nev : Number of eigenvectors to be calculated
! nblk : Blocking factor in block cyclic distribution
!
! The following arguments are fixed and only pass on MPI infrastructure and task information:
!
! myid : MPI task number (we only read on myid=0 and broadcast to all others)
! mpi_comm : MPI communicator ID
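!
! An example 'test_parameters.in' (any subset of the lines may appear;
! lines starting with '#' are treated as comments):
!
!   # example input
!   na 7000
!   nev 3000
!   nblk 32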
!
! Copyright of the original code rests with the authors inside the ELPA
! consortium. The copyright of any additional modifications shall rest
! with their original authors, but shall adhere to the licensing terms
! distributed along with the original code in the file "COPYING".
!
!-------------------------------------------------------------------------------
! this should be a simple read-in routine ... no use statements
implicit none
include 'mpif.h'
integer, intent(inout) :: na, nev, nblk
integer, intent(in) :: myid, mpi_comm
!-------------------------------------------------------------------------------
! Local Variables
! For reading operations:
integer :: info, i_code
character*132 :: inputline
character*40 :: desc_str
! For MPI allreduce
integer :: temp_mpi
integer :: mpierr
!-------------------------------------------------------------------------------
! begin work
if (myid==0) then
! we only read on task 0
info=0
open (7,file='test_parameters.in',status='OLD',iostat=info)
if (info.ne.0) then
! File not found or not readable. In this case, we keep the defaults.
write(6,*)
write (6,'(2X,A)') "Input file 'test_parameters.in' not found. We keep the defaults for na, nev, nblk."
else
! File exists, attempt to read.
write(6,*)
write (6,'(2X,A)') "Attempting to read input file 'test_parameters.in' for values of na, nev, nblk."
lineloop: do
read(7,'(A)',iostat=i_code) inputline
if (i_code.lt.0) then
write (6,'(2X,A)') "| End of input file 'test_parameters.in' reached."
exit lineloop
else if (i_code.gt.0) then
write (6,'(1X,A)') "* Unknown error reading next line of input file 'test_parameters.in'."
exit lineloop
end if
! if we are here, inputline was read correctly. Next, we dissect it for its content.
read(inputline,*,iostat=i_code) desc_str
if (i_code /= 0) then
cycle ! empty line
elseif (desc_str(1:1).eq.'#') then
cycle ! comment
elseif (desc_str.eq."na") then
read(inputline,*,end=88,err=99) desc_str, na
write(6,'(2X,A,I15)') "| Found value for na : ", na
elseif (desc_str.eq."nev") then
read(inputline,*,end=88,err=99) desc_str, nev
write(6,'(2X,A,I15)') "| Found value for nev : ", nev
elseif (desc_str.eq."nblk") then
read(inputline,*,end=88,err=99) desc_str, nblk
write(6,'(2X,A,I15)') "| Found value for nblk : ", nblk
end if
enddo lineloop
close(7)
end if ! info == 0 (input file exists)
! Next, check current values for consistency.
if (na.le.0) then
write(6,*) "* Error - Found illegal value for na: ", na
write(6,*) "* na must be greater than zero - stopping the test run."
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
end if
if (nev.le.0) then
write(6,*) "* Error - Found illegal value for nev: ", nev
write(6,*) "* nev must be greater than zero - stopping the test run."
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
else if (nev.gt.na) then
write(6,*) "* Error - Found nev value that is greater than na. nev: ", nev
nev = na
write(6,*) "* Reducing nev to nev = na = ", nev, "."
end if
if (nblk.le.0) then
write(6,*) "* Error - Found illegal value for nblk: ", nblk
write(6,*) "* nblk must be greater than zero - stopping the test run."
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
else if (nblk.gt.na) then
write(6,*) "* Error - Found illegal value for nblk: ", nblk
write(6,*) "* nblk must be (much!) less than na - stopping the test run."
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
end if
else ! if we are not on myid=0 :
! for later allreduce, zero values on other processes
na = 0
nev = 0
nblk = 0
end if
! Synchronize all values. Note that, if a value was not read from the input file,
! a default value was already compiled into the code.
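! Since all tasks other than myid=0 contributed zeros above, the MPI_SUM
! allreduce effectively broadcasts the values held on task 0; a single
! MPI_BCAST per value would achieve the same result.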
! synchronize na
temp_mpi = 0
call MPI_ALLREDUCE(na, temp_mpi, 1, &
MPI_INTEGER, MPI_SUM, mpi_comm, mpierr)
na = temp_mpi
! synchronize nev
temp_mpi = 0
call MPI_ALLREDUCE(nev, temp_mpi, 1, &
MPI_INTEGER, MPI_SUM, mpi_comm, mpierr)
nev = temp_mpi
! synchronize nblk
temp_mpi = 0
call MPI_ALLREDUCE(nblk, temp_mpi, 1, &
MPI_INTEGER, MPI_SUM, mpi_comm, mpierr)
nblk = temp_mpi
! normally the subroutine is done at this point
return
! Error traps for read statements
88 continue
if (myid == 0) then
write (*,*) "Syntax error reading 'test_parameters.in' (missing arguments?)."
write (*,*) "line: '"//trim(inputline)//"'"
end if
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
99 continue
if (myid == 0) then
write (*,*) "Syntax error reading 'test_parameters.in'."
write (*,*) "line: '"//trim(inputline)//"'"
end if
! harsh exit - but we can only get here on task 0
call MPI_Abort(mpi_comm, 0, mpierr)
end subroutine read_test_parameters
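
For illustration, a minimal caller of the routine above might look like the
following sketch (hypothetical demo program; the actual test programs below
use the routine in exactly this way, with defaults set in their headers):

! Hypothetical minimal caller of read_test_parameters (illustration only).
program read_parameters_demo
  implicit none
  include 'mpif.h'
  integer :: na, nev, nblk, myid, mpierr
  ! compiled-in defaults, optionally overridden by 'test_parameters.in'
  na = 4000; nev = 1500; nblk = 16
  call mpi_init(mpierr)
  call mpi_comm_rank(mpi_comm_world, myid, mpierr)
  call read_test_parameters(na, nev, nblk, myid, mpi_comm_world)
  if (myid == 0) write(6,*) 'Using na, nev, nblk = ', na, nev, nblk
  call mpi_finalize(mpierr)
end program read_parameters_demo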
@@ -24,7 +24,7 @@ program test_complex
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -52,6 +52,14 @@ program test_complex
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......
@@ -25,7 +25,7 @@ program test_complex2
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -53,6 +53,14 @@ program test_complex2
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......
@@ -25,7 +25,7 @@ program test_complex_gen
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -55,6 +55,14 @@ program test_complex_gen
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......
@@ -25,7 +25,7 @@ program test_real
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -49,6 +49,14 @@ program test_real
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......
@@ -26,7 +26,7 @@ program test_real2
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -50,6 +50,14 @@ program test_real2
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......
@@ -25,7 +25,7 @@ program test_real_gen
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------
integer, parameter :: na = 4000, nev = 1500, nblk = 16
integer :: na = 4000, nev = 1500, nblk = 16
!-------------------------------------------------------------------------------
! Local Variables
@@ -51,6 +51,14 @@ program test_real_gen
call mpi_comm_rank(mpi_comm_world,myid,mpierr)
call mpi_comm_size(mpi_comm_world,nprocs,mpierr)
!-------------------------------------------------------------------------------
! Reading of test parameters (matrix size, number of requested eigenvalue/eigenvector
! pairs, block size) from a file 'test_parameters.in', if that file exists.
! We only read on mpi task number myid = 0 to avoid any possible confusion.
! The parameters of interest are subsequently broadcast to all other mpi tasks.
call read_test_parameters (na,nev,nblk,myid,mpi_comm_world)
!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
......