program test_complex2

!-------------------------------------------------------------------------------
! Standard eigenvalue problem - COMPLEX version
!
! This program demonstrates the use of the ELPA module
! together with standard scalapack routines
!
! Copyright of the original code rests with the authors inside the ELPA
! consortium. The copyright of any additional modifications shall rest
! with their original authors, but shall adhere to the licensing terms
! distributed along with the original code in the file "COPYING".
!-------------------------------------------------------------------------------

   use ELPA1
   use ELPA2

   implicit none
   include 'mpif.h'

!-------------------------------------------------------------------------------
! Please set system size parameters below!
! na:   System size
! nev:  Number of eigenvectors to be calculated
! nblk: Blocking factor in block cyclic distribution
!-------------------------------------------------------------------------------

   integer, parameter :: nblk = 16

   ! Problem size and number of requested eigenvectors; defaults may be
   ! overridden from the command line (see argument parsing below).
   integer na, nev

!-------------------------------------------------------------------------------
!  Local Variables

   ! Process-grid dimensions and the local (block-cyclic) matrix dimensions.
   integer np_rows, np_cols, na_rows, na_cols

   integer myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols
   integer i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol

   ! ScaLAPACK helper: number of rows/cols of a distributed matrix owned locally.
   integer, external :: numroc

   real*8 err, errmax
   real*8, allocatable :: ev(:), xr(:,:)

   complex*16 :: xc
   complex*16, allocatable :: a(:,:), z(:,:), tmp1(:,:), tmp2(:,:), as(:,:)
   complex*16, parameter :: CZERO = (0.d0,0.d0), CONE = (1.d0,0.d0)

   integer :: iseed(4096) ! Random seed, size should be sufficient for every generator

!-------------------------------------------------------------------------------
!
!  Parse command line arguments, if given

   character*16 arg1
   character*16 arg2

   ! Default problem size and eigenvector count, used when no arguments are given.
   na = 4000
   nev = 1500

   ! Optional override: "test_complex2 <na> <nev>".
   ! NOTE(review): iargc/getarg are non-standard extensions (pre-F2003),
   ! kept here for compatibility with the original code.
   if (iargc() == 2) then
      call getarg(1, arg1)
      call getarg(2, arg2)
      read(arg1, *) na
      read(arg2, *) nev
   endif

!-------------------------------------------------------------------------------
!  MPI Initialization

   call mpi_init(mpierr)
   call mpi_comm_rank(mpi_comm_world,myid,mpierr)
   call mpi_comm_size(mpi_comm_world,nprocs,mpierr)

!-------------------------------------------------------------------------------
! Selection of number of processor rows/columns
! We try to set up the grid square-like, i.e. start the search for possible
! divisors of nprocs with a number next to the square root of nprocs
! and decrement it until a divisor is found.

   do np_cols = NINT(SQRT(REAL(nprocs))),2,-1
      if(mod(nprocs,np_cols) == 0 ) exit
   enddo
   ! at the end of the above loop, nprocs is always divisible by np_cols

   np_rows = nprocs/np_cols

   ! Only rank 0 prints the run parameters.
   if(myid==0) then
      print *
      print '(a)','Standard eigenvalue problem - COMPLEX version'
      print *
      print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk
      print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs
      print *
   endif

!-------------------------------------------------------------------------------
! Set up BLACS context and MPI communicators
!
! The BLACS context is only necessary for using Scalapack.
!
! For ELPA, the MPI communicators along rows/cols are sufficient,
! and the grid setup may be done in an arbitrary way as long as it is
! consistent (i.e. 0<=my_prow