!    This file is part of ELPA.
!
!    The ELPA library was originally created by the ELPA consortium,
!    consisting of the following organizations:
!
!    - Max Planck Computing and Data Facility (MPCDF), formerly known as
!      Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG),
!    - Bergische Universität Wuppertal, Lehrstuhl für angewandte
!      Informatik,
!    - Technische Universität München, Lehrstuhl für Informatik mit
!      Schwerpunkt Wissenschaftliches Rechnen,
!    - Fritz-Haber-Institut, Berlin, Abt. Theorie,
!    - Max-Planck-Institut für Mathematik in den Naturwissenschaften,
!      Leipzig, Abt. Komplexe Strukturen in Biologie und Kognition,
!      and
!    - IBM Deutschland GmbH
!
!
!    More information can be found here:
!    http://elpa.mpcdf.mpg.de/
!
!    ELPA is free software: you can redistribute it and/or modify
!    it under the terms of the version 3 of the license of the
!    GNU Lesser General Public License as published by the Free
!    Software Foundation.
!
!    ELPA is distributed in the hope that it will be useful,
!    but WITHOUT ANY WARRANTY; without even the implied warranty of
!    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
!    GNU Lesser General Public License for more details.
!
!    You should have received a copy of the GNU Lesser General Public License
!    along with ELPA.  If not, see <http://www.gnu.org/licenses/>
!
!    ELPA reflects a substantial effort on the part of the original
!    ELPA consortium, and we ask you to respect the spirit of the
!    license that we chose: i.e., please contribute any changes you
!    may have back to the original ELPA library distribution, and keep
!    any derivatives of ELPA under the same license that we chose for
!    the original distribution, the GNU Lesser General Public License.
!
!
!>
!> Fortran test program demonstrating the use of the
!> ELPA 1 real case library.
!> If "HAVE_REDIRECT" was defined at build time,
!> the stdout and stderr output of each MPI task
!> can be redirected to files if the environment
!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set
!> to "true".
!>
!> By calling the executable [arg1] [arg2] [arg3] [arg4]
!> one can define the matrix size (arg1), the number of
!> eigenvectors to compute (arg2), and the blocking (arg3).
!> If these values are not set, default values (4000, 1500, 16)
!> are chosen.
!> If these values are set, the 4th argument can be
!> "output", which specifies that the eigenvectors are written to
!> an ASCII file.
!>
program test_real_example

!-------------------------------------------------------------------------------
! Standard eigenvalue problem - REAL version
!
! This program demonstrates the use of the ELPA module
! together with standard ScaLAPACK routines
!
! Copyright of the original code rests with the authors inside the ELPA
! consortium. The copyright of any additional modifications shall rest
! with their original authors, but shall adhere to the licensing terms
! distributed along with the original code in the file "COPYING".
!
!-------------------------------------------------------------------------------

   use iso_c_binding

   use elpa

#ifdef HAVE_MPI_MODULE
   use mpi
   implicit none
#else
   implicit none
   include 'mpif.h'
#endif

   !-------------------------------------------------------------------------------
   ! Please set system size parameters below!
   ! na:   System size
   ! nev:  Number of eigenvectors to be calculated
   ! nblk: Blocking factor in block cyclic distribution
   !-------------------------------------------------------------------------------

   integer                          :: nblk
   integer                          :: na, nev

   integer                          :: np_rows, np_cols, na_rows, na_cols

   integer                          :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols
   integer                          :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol

   integer, external                :: numroc

   real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:)

   integer                          :: iseed(4096) ! Random seed, size should be sufficient for every generator

   integer                          :: STATUS
   integer                          :: success
   character(len=8)                 :: task_suffix
   integer                          :: j

   integer, parameter               :: error_units = 0

   class(elpa_t), pointer           :: e
   !-------------------------------------------------------------------------------


   ! default parameters
   na = 1000
   nev = 500
   nblk = 16

   call mpi_init(mpierr)
   call mpi_comm_rank(mpi_comm_world,myid,mpierr)
   call mpi_comm_size(mpi_comm_world,nprocs,mpierr)

   do np_cols = NINT(SQRT(REAL(nprocs))),2,-1
     if (mod(nprocs,np_cols) == 0) exit
   enddo
   ! at the end of the above loop, nprocs is always divisible by np_cols
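   ! choosing np_cols close to sqrt(nprocs) keeps the process grid as
   ! square as possible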

   np_rows = nprocs/np_cols

   ! initialise BLACS
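   ! (the MPI communicator is passed in as the initial context handle and
   ! 'C' selects column-major ordering of the process grid)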
   my_blacs_ctxt = mpi_comm_world
   call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols)
   call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol)

   if (myid==0) then
     print '(a)','| Past BLACS_Gridinfo.'
   end if
   ! determine the necessary size of the distributed matrices,
   ! we use the ScaLAPACK tools routine NUMROC

   na_rows = numroc(na, nblk, my_prow, 0, np_rows)
   na_cols = numroc(na, nblk, my_pcol, 0, np_cols)
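   ! na_rows x na_cols is the size of the local block-cyclic part of the
   ! na x na matrix owned by this process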

   ! set up the ScaLAPACK descriptor for the checks below
   ! For ELPA the following restrictions hold:
   ! - block sizes in both directions must be identical (args 4 and 5)
   ! - first row and column of the distributed matrix must be on
   !   row/col 0/0 (args 6 and 7)

   call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info)

   if (info .ne. 0) then
     write(error_units,*) 'Error in BLACS descinit! info=',info
     write(error_units,*) 'Most likely this happened because you want to use'
     write(error_units,*) 'more MPI tasks than are possible for your'
     write(error_units,*) 'problem size (matrix size and blocksize)!'
     write(error_units,*) 'The BLACS grid cannot be set up properly'
     write(error_units,*) 'Try reducing the number of MPI tasks...'
     call MPI_ABORT(mpi_comm_world, 1, mpierr)
   endif

   if (myid==0) then
     print '(a)','| Past scalapack descriptor setup.'
   end if

   allocate(a (na_rows,na_cols))
   allocate(z (na_rows,na_cols))

   allocate(ev(na))

   ! we want different random numbers on every process
   ! (otherwise A might become rank-deficient):

   iseed(:) = myid
   call RANDOM_SEED(put=iseed)
   call RANDOM_NUMBER(z)

   a(:,:) = z(:,:)

   if (myid == 0) then
     print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)'
   endif
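   ! symmetrize the matrix: A currently equals Z, so the transposed update
   ! below yields A = Z + Z**T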
   call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T

   !-------------------------------------------------------------------------------
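   ! initialize the ELPA library; the argument is the API version
   ! (20171201) this program is written against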

   if (elpa_init(20171201) /= elpa_ok) then
     print *, "ELPA API version not supported"
     stop
   endif
   e => elpa_allocate()

   ! set parameters describing the matrix and its MPI distribution
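   ! (the error codes returned in "success" are not checked here for
   ! brevity; production code should compare each against elpa_ok)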
   call e%set("na", na, success)
   call e%set("nev", nev, success)
   call e%set("local_nrows", na_rows, success)
   call e%set("local_ncols", na_cols, success)
   call e%set("nblk", nblk, success)
   call e%set("mpi_comm_parent", mpi_comm_world, success)
   call e%set("process_row", my_prow, success)
   call e%set("process_col", my_pcol, success)

   success = e%setup()
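   ! setup() completes the configuration and creates ELPA's internal
   ! row/column communicators from mpi_comm_parent and the grid coordinates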

   call e%set("solver", elpa_solver_1stage, success)
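   ! (elpa_solver_2stage would select the two-stage solver instead)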


   ! Calculate eigenvalues/eigenvectors

   if (myid==0) then
     print '(a)','| Entering one-step ELPA solver ... '
     print *
   end if

   call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only
   call e%eigenvectors(a, ev, z, success)
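   ! on success, ev holds all na eigenvalues in ascending order and the
   ! first nev columns of z hold the corresponding eigenvectors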

   if (myid==0) then
     print '(a)','| One-step ELPA solver complete.'
     print *
   end if

   call elpa_deallocate(e)
   call elpa_uninit()

   call blacs_gridexit(my_blacs_ctxt)
   call mpi_finalize(mpierr)

end program test_real_example