Unverified Commit 39e2a346 authored by Andreas Marek

Documentation and interfaces for publicly available functions

parent cddfb00c
.TH "elpa_cholesky_complex" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_cholesky_complex \- Cholesky factorization of a complex hermitian matrix
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_cholesky_complex\fP (na, a(lda,matrixCols), lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "complex*16, intent(inout) \fBa\fP: locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
#include <complex.h>
.br
.RI "\fBint\fP success = \fBelpa_cholesky_complex\fP (\fBint\fP na, \fB double complex *\fPa, \fBint\fP lda, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "double complex *\fBa\fP: pointer to locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "int \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Does a Cholesky factorization of a complex, hermitian matrix. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBa\fP has global dimensions \fBna\fP x \fBna\fP, and a local size of \fBlda\fP x \fBmatrixCols\fP.
.br
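.SH EXAMPLE
A minimal calling sketch for the C interface. It assumes that the block-cyclic distribution (\fBna\fP, \fBlda\fP, \fBnblk\fP, \fBmatrixCols\fP) has already been set up and that \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP were obtained with \fBget_elpa_communicators\fP(3); MPI setup and error handling are omitted.
.br
.nf
#include <stdlib.h>
#include <complex.h>
#include "elpa.h"

/* allocate the local part of the distributed matrix */
double complex *a = malloc((size_t) lda * matrixCols
                           * sizeof(double complex));
/* fill the local part of the upper triangle of a here */
int success = elpa_cholesky_complex(na, a, lda, nblk, matrixCols,
                                    mpi_comm_rows, mpi_comm_cols, 1);
if (success != 1) {
    /* factorization failed; wantDebug = 1 prints details */
}
free(a);
.fi
.br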
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
.TH "elpa_cholesky_real" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_cholesky_real \- Cholesky factorization of a real symmetric matrix
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_cholesky_real\fP (na, a(lda,matrixCols), lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "real*8, intent(inout) \fBa\fP: locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
.RI "\fBint\fP success = \fBelpa_cholesky_real\fP (\fBint\fP na, \fB double *\fPa, \fBint\fP lda, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "double *\fBa\fP: pointer to locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "int \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Does a Cholesky factorization of a real, symmetric matrix. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBa\fP has global dimensions \fBna\fP x \fBna\fP, and a local size of \fBlda\fP x \fBmatrixCols\fP.
.br
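.SH EXAMPLE
A minimal calling sketch for the C interface, under the same assumptions as for \fBelpa_cholesky_complex\fP(3): the block-cyclic distribution is already set up, the communicators come from \fBget_elpa_communicators\fP(3), and MPI setup and error handling are omitted.
.br
.nf
#include <stdlib.h>
#include "elpa.h"

double *a = malloc((size_t) lda * matrixCols * sizeof(double));
/* fill the local part of the upper triangle of a here */
int success = elpa_cholesky_real(na, a, lda, nblk, matrixCols,
                                 mpi_comm_rows, mpi_comm_cols, 1);
if (success != 1) {
    /* factorization failed; wantDebug = 1 prints details */
}
free(a);
.fi
.br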
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
.TH "elpa_solve_tridi" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_solve_tridi \- Solve tridiagonal eigensystem with divide and conquer method
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_solve_trid\fP (na, nev, d(na), e(na), q(ldq,matrixCols), ldq, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "integer, intent(in) \fBnev\fP: number of eigenvalues/vectors to be computed"
.br
.RI "real*8, intent(inout) \fBd(na)\fP: array d(na) on input diagonal elements of tridiagonal matrix, on output the eigenvalues in ascending order"
.br
.RI "real*8, intent(in) \fBe(na)\fP: array e(na) on input subdiagonal elements of matrix, on exit destroyed"
.br
.RI "real*8, intent(inout) \fBq\fP: on exit \fBq\fP contains the eigenvectors. The local dimensions are \fBldq\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBldq\fP: leading dimension of locally distributed matrix \fBq\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
.RI "\fBint\fP success = \fBelpa_solve_tridi\fP (\fBint\fP na, \fBint\fP nev, \fB double *\fPd,\fB double *\fPe ,\fB double *\fPq, \fBint\fP ldq, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "int \fBnev\fP: number of eigenvalues/eigenvectors to be computed"
.br
.RI "double *\fBd\fP: pointer to array d(na) on input diagonal elements of tridiagonal matrix, on output the eigenvalues in ascending order"
.br
.RI "double *\fBe\fP: pointer to array e(na) on input subdiagonal elements of matrix, on exit destroyed"
.br
.RI "double *\fBq\fP: on exit \fBq\fP contains the eigenvectors. The local dimensions are \fBldq\fP x \fBmatrixCols\fP"
.br
.RI "int \fBldq\fP: leading dimension of locally distributed matrix \fBq\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Solves a tridiagonal eigenvalue problem with a divide and conquer method and returns \fBnev\fP eigenvalues/eigenvectors. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBq\fP has global dimensions \fBna\fP x \fBna\fP, and a local size of \fBldq\fP x \fBmatrixCols\fP.
.br
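.SH EXAMPLE
A minimal calling sketch for the C interface. It assumes that \fBd\fP and \fBe\fP already hold the diagonal and subdiagonal of the tridiagonal matrix, that the block-cyclic distribution of \fBq\fP is set up, and that the communicators come from \fBget_elpa_communicators\fP(3); error handling is omitted.
.br
.nf
#include <stdlib.h>
#include "elpa.h"

double *d = malloc(na * sizeof(double)); /* in: diagonal, out: eigenvalues */
double *e = malloc(na * sizeof(double)); /* in: subdiagonal, destroyed */
double *q = malloc((size_t) ldq * matrixCols * sizeof(double));
/* fill d and e here, then solve for the first nev eigenpairs */
int success = elpa_solve_tridi(na, nev, d, e, q, ldq, nblk, matrixCols,
                               mpi_comm_rows, mpi_comm_cols, 0);
if (success != 1) {
    /* solver failed */
}
.fi
.br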
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
@@ -100,16 +100,26 @@ module ELPA1
! imported from elpa1_auxiliary
public :: mult_at_b_real !< Multiply real matrices A**T * B
public :: mult_ah_b_complex !< Multiply complex matrices A**H * B
public :: elpa_mult_at_b_real !< Multiply real matrices A**T * B
public :: mult_at_b_real !< old, deprecated interface to multiply real matrices A**T * B
public :: invert_trm_real !< Invert real triangular matrix
public :: invert_trm_complex !< Invert complex triangular matrix
public :: elpa_mult_ah_b_complex !< Multiply complex matrices A**H * B
public :: mult_ah_b_complex !< old, deprecated interface to multiply complex matrices A**H * B
public :: cholesky_real !< Cholesky factorization of a real matrix
public :: cholesky_complex !< Cholesky factorization of a complex matrix
public :: elpa_invert_trm_real !< Invert real triangular matrix
public :: invert_trm_real !< old, deprecated interface to invert real triangular matrix
public :: elpa_invert_trm_complex !< Invert complex triangular matrix
public :: invert_trm_complex !< old, deprecated interface to invert complex triangular matrix
public :: elpa_cholesky_real !< Cholesky factorization of a real matrix
public :: cholesky_real !< old, deprecated interface to do Cholesky factorization of a real matrix
public :: elpa_cholesky_complex !< Cholesky factorization of a complex matrix
public :: cholesky_complex !< old, deprecated interface to do Cholesky factorization of a complex matrix
public :: elpa_solve_tridi !< Solve tridiagonal eigensystem with divide and conquer method
public :: solve_tridi !< Solve tridiagonal eigensystem with divide and conquer method
! Timing results, set by every call to solve_evp_xxx
@@ -58,20 +58,175 @@ module elpa1_auxiliary
private
public :: mult_at_b_real !< Multiply real matrices A**T * B
public :: mult_ah_b_complex !< Multiply complex matrices A**H * B
public :: elpa_mult_at_b_real !< Multiply real matrices A**T * B
public :: mult_at_b_real !< Old, deprecated interface to elpa_mult_at_b_real
public :: elpa_mult_ah_b_complex !< Multiply complex matrices A**H * B
public :: mult_ah_b_complex !< old, deprecated interface to elpa_mult_ah_b_complex
public :: invert_trm_real !< Invert real triangular matrix
public :: invert_trm_complex !< Invert complex triangular matrix
public :: elpa_invert_trm_real !< Invert real triangular matrix
public :: invert_trm_real !< old, deprecated interface to elpa_invert_trm_real
public :: elpa_invert_trm_complex !< Invert complex triangular matrix
public :: invert_trm_complex !< old, deprecated interface to elpa_invert_trm_complex
public :: cholesky_real !< Cholesky factorization of a real matrix
public :: cholesky_complex !< Cholesky factorization of a complex matrix
public :: elpa_cholesky_real !< Cholesky factorization of a real matrix
public :: cholesky_real !< old, deprecated interface to elpa_cholesky_real
public :: solve_tridi !< Solve tridiagonal eigensystem with divide and conquer method
public :: elpa_cholesky_complex !< Cholesky factorization of a complex matrix
public :: cholesky_complex !< old, deprecated interface to elpa_cholesky_complex
public :: elpa_solve_tridi !< Solve tridiagonal eigensystem with divide and conquer method
!> \brief old, deprecated interface cholesky_real: Cholesky factorization of a real symmetric matrix
!> \details
!>
!> \param na Order of matrix
!> \param a(lda,matrixCols) Distributed matrix which should be factorized.
!> Distribution is like in ScaLAPACK.
!> Only the upper triangle needs to be set.
!> On return, the upper triangle contains the Cholesky factor
!> and the lower triangle is set to 0.
!> \param lda Leading dimension of a
!> \param matrixCols local columns of matrix a
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \result success logical, reports success or failure
interface cholesky_real
module procedure elpa_cholesky_real
end interface
!> \brief Old, deprecated interface invert_trm_real: Inverts an upper triangular matrix
!> \details
!> \param na Order of matrix
!> \param a(lda,matrixCols) Distributed matrix which should be inverted
!> Distribution is like in ScaLAPACK.
!> Only the upper triangle needs to be set.
!> The lower triangle is not referenced.
!> \param lda Leading dimension of a
!> \param matrixCols local columns of matrix a
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \result success logical, reports success or failure
interface invert_trm_real
module procedure elpa_invert_trm_real
end interface
!> \brief old, deprecated interface cholesky_complex: Cholesky factorization of a complex hermitian matrix
!> \details
!> \param na Order of matrix
!> \param a(lda,matrixCols) Distributed matrix which should be factorized.
!> Distribution is like in ScaLAPACK.
!> Only the upper triangle needs to be set.
!> On return, the upper triangle contains the Cholesky factor
!> and the lower triangle is set to 0.
!> \param lda Leading dimension of a
!> \param matrixCols local columns of matrix a
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \result success logical, reports success or failure
interface cholesky_complex
module procedure elpa_cholesky_complex
end interface
!> \brief old, deprecated interface invert_trm_complex: Inverts a complex upper triangular matrix
!> \details
!> \param na Order of matrix
!> \param a(lda,matrixCols) Distributed matrix which should be inverted
!> Distribution is like in ScaLAPACK.
!> Only the upper triangle needs to be set.
!> The lower triangle is not referenced.
!> \param lda Leading dimension of a
!> \param matrixCols local columns of matrix a
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \result success logical, reports success or failure
interface invert_trm_complex
module procedure elpa_invert_trm_complex
end interface
!> \brief mult_at_b_real: Performs C := A**T * B
!> this is the old, deprecated interface for the newer elpa_mult_at_b_real
!> where A is a square matrix (na,na) which is optionally upper or lower triangular
!> B is a (na,ncb) matrix
!> C is a (na,ncb) matrix where optionally only the upper or lower
!> triangle may be computed
!> \details
!> \param uplo_a 'U' if A is upper triangular
!> 'L' if A is lower triangular
!> anything else if A is a full matrix
!> Please note: This pertains to the original A (as set in the calling program)
!> whereas the transpose of A is used for calculations
!> If uplo_a is 'U' or 'L', the other triangle is not used at all,
!> i.e. it may contain arbitrary numbers
!> \param uplo_c 'U' if only the upper diagonal part of C is needed
!> 'L' if only the lower diagonal part of C is needed
!> anything else if the full matrix C is needed
!> Please note: Even when uplo_c is 'U' or 'L', the other triangle may be
!> written to a certain extent, i.e. one shouldn't rely on the content there!
!> \param na Number of rows/columns of A, number of rows of B and C
!> \param ncb Number of columns of B and C
!> \param a matrix a
!> \param lda leading dimension of matrix a
!> \param b matrix b
!> \param ldb leading dimension of matrix b
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param c matrix c
!> \param ldc leading dimension of matrix c
interface mult_at_b_real
module procedure elpa_mult_at_b_real
end interface
!> \brief Old, deprecated interface mult_ah_b_complex: Performs C := A**H * B
!> where A is a square matrix (na,na) which is optionally upper or lower triangular
!> B is a (na,ncb) matrix
!> C is a (na,ncb) matrix where optionally only the upper or lower
!> triangle may be computed
!> \details
!>
!> \param uplo_a 'U' if A is upper triangular
!> 'L' if A is lower triangular
!> anything else if A is a full matrix
!> Please note: This pertains to the original A (as set in the calling program)
!> whereas the conjugate transpose of A is used for calculations
!> If uplo_a is 'U' or 'L', the other triangle is not used at all,
!> i.e. it may contain arbitrary numbers
!> \param uplo_c 'U' if only the upper diagonal part of C is needed
!> 'L' if only the lower diagonal part of C is needed
!> anything else if the full matrix C is needed
!> Please note: Even when uplo_c is 'U' or 'L', the other triangle may be
!> written to a certain extent, i.e. one shouldn't rely on the content there!
!> \param na Number of rows/columns of A, number of rows of B and C
!> \param ncb Number of columns of B and C
!> \param a matrix a
!> \param lda leading dimension of matrix a
!> \param b matrix b
!> \param ldb leading dimension of matrix b
!> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param c matrix c
!> \param ldc leading dimension of matrix c
interface mult_ah_b_complex
module procedure elpa_mult_ah_b_complex
end interface
contains
!> \brief cholesky_real: Cholesky factorization of a real symmetric matrix
!> \brief elpa_cholesky_real: Cholesky factorization of a real symmetric matrix
!> \details
!>
!> \param na Order of matrix
@@ -86,8 +241,8 @@ module elpa1_auxiliary
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \param success logical, reports success or failure
subroutine cholesky_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug, success)
!> \result success logical, reports success or failure
function elpa_cholesky_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) result(success)
#ifdef HAVE_DETAILED_TIMINGS
use timings
#endif
@@ -112,12 +267,12 @@ module elpa1_auxiliary
real(kind=rk), allocatable :: tmp1(:), tmp2(:,:), tmatr(:,:), tmatc(:,:)
logical, intent(in) :: wantDebug
logical, intent(out) :: success
logical :: success
integer(kind=ik) :: istat
character(200) :: errorMessage
#ifdef HAVE_DETAILED_TIMINGS
call timer%start("cholesky_real")
call timer%start("elpa_cholesky_real")
#endif
call mpi_comm_rank(mpi_comm_rows,my_prow,mpierr)
call mpi_comm_size(mpi_comm_rows,np_rows,mpierr)
@@ -138,13 +293,13 @@ module elpa1_auxiliary
allocate(tmp1(nblk*nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"cholesky_real: error when allocating tmp1 "//errorMessage
print *,"elpa_cholesky_real: error when allocating tmp1 "//errorMessage
stop
endif
allocate(tmp2(nblk,nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"cholesky_real: error when allocating tmp2 "//errorMessage
print *,"elpa_cholesky_real: error when allocating tmp2 "//errorMessage
stop
endif
@@ -153,13 +308,13 @@ module elpa1_auxiliary
allocate(tmatr(l_rows,nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"cholesky_real: error when allocating tmatr "//errorMessage
print *,"elpa_cholesky_real: error when allocating tmatr "//errorMessage
stop
endif
allocate(tmatc(l_cols,nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"cholesky_real: error when allocating tmatc "//errorMessage
print *,"elpa_cholesky_real: error when allocating tmatc "//errorMessage
stop
endif
@@ -186,7 +341,7 @@ module elpa1_auxiliary
call dpotrf('U',na-n+1,a(l_row1,l_col1),lda,info)
if (info/=0) then
if (wantDebug) write(error_unit,*) "ELPA1_cholesky_real: Error in dpotrf"
if (wantDebug) write(error_unit,*) "elpa_cholesky_real: Error in dpotrf"
success = .false.
return
endif
@@ -206,7 +361,7 @@ module elpa1_auxiliary
call dpotrf('U',nblk,a(l_row1,l_col1),lda,info)
if (info/=0) then
if (wantDebug) write(error_unit,*) "ELPA1_cholesky_real: Error in dpotrf"
if (wantDebug) write(error_unit,*) "elpa_cholesky_real: Error in dpotrf"
success = .false.
return
endif
@@ -227,7 +382,7 @@ module elpa1_auxiliary
enddo
if (l_cols-l_colx+1>0) &
call dtrsm('L','U','T','N',nblk,l_cols-l_colx+1,1.d0,tmp2,ubound(tmp2,dim=1),a(l_row1,l_colx),lda)
call dtrsm('L','U','T','N',nblk,l_cols-l_colx+1,1._rk,tmp2,ubound(tmp2,dim=1),a(l_row1,l_colx),lda)
endif
@@ -250,16 +405,16 @@ module elpa1_auxiliary
lrs = l_rowx
lre = min(l_rows,(i+1)*l_rows_tile)
if (lce<lcs .or. lre<lrs) cycle
call DGEMM('N','T',lre-lrs+1,lce-lcs+1,nblk,-1.d0, &
call DGEMM('N','T',lre-lrs+1,lce-lcs+1,nblk,-1._rk, &
tmatr(lrs,1),ubound(tmatr,dim=1),tmatc(lcs,1),ubound(tmatc,dim=1), &
1.d0,a(lrs,lcs),lda)
1._rk,a(lrs,lcs),lda)
enddo
enddo
deallocate(tmp1, tmp2, tmatr, tmatc, stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"cholesky_real: error when deallocating tmp1 "//errorMessage
print *,"elpa_cholesky_real: error when deallocating tmp1 "//errorMessage
stop
endif
@@ -274,12 +429,12 @@ module elpa1_auxiliary
endif
enddo
#ifdef HAVE_DETAILED_TIMINGS
call timer%stop("cholesky_real")
call timer%stop("elpa_cholesky_real")
#endif
end subroutine cholesky_real
end function elpa_cholesky_real
!> \brief invert_trm_real: Inverts an upper triangular matrix
!> \brief elpa_invert_trm_real: Inverts an upper triangular matrix
!> \details
!> \param na Order of matrix
!> \param a(lda,matrixCols) Distributed matrix which should be inverted
@@ -292,8 +447,8 @@ module elpa1_auxiliary
!> \param mpi_comm_rows MPI communicator for rows
!> \param mpi_comm_cols MPI communicator for columns
!> \param wantDebug logical, more debug information on failure
!> \param success logical, reports success or failure
subroutine invert_trm_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug, success)
!> \result success logical, reports success or failure
function elpa_invert_trm_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) result(success)
use precision
use elpa1_compute
use elpa_utilities
@@ -314,9 +469,10 @@ module elpa1_auxiliary
real(kind=rk), allocatable :: tmp1(:), tmp2(:,:), tmat1(:,:), tmat2(:,:)
logical, intent(in) :: wantDebug
logical, intent(out) :: success
logical :: success
integer(kind=ik) :: istat
character(200) :: errorMessage
call mpi_comm_rank(mpi_comm_rows,my_prow,mpierr)
call mpi_comm_size(mpi_comm_rows,np_rows,mpierr)
call mpi_comm_rank(mpi_comm_cols,my_pcol,mpierr)
@@ -328,13 +484,13 @@ module elpa1_auxiliary
allocate(tmp1(nblk*nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"invert_trm_real: error when allocating tmp1 "//errorMessage
print *,"elpa_invert_trm_real: error when allocating tmp1 "//errorMessage
stop
endif
allocate(tmp2(nblk,nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"invert_trm_real: error when allocating tmp2 "//errorMessage
print *,"elpa_invert_trm_real: error when allocating tmp2 "//errorMessage
stop
endif
@@ -343,13 +499,13 @@ module elpa1_auxiliary
allocate(tmat1(l_rows,nblk), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"invert_trm_real: error when allocating tmat1 "//errorMessage
print *,"elpa_invert_trm_real: error when allocating tmat1 "//errorMessage
stop
endif
allocate(tmat2(nblk,l_cols), stat=istat, errmsg=errorMessage)
if (istat .ne. 0) then
print *,"invert_trm_real: error when allocating tmat2 "//errorMessage
print *,"elpa_invert_trm_real: error when allocating tmat2 "//errorMessage
stop
endif
@@ -376,7 +532,7 @@ module elpa1_auxiliary
call DTRTRI('U','N',nb,a(l_row1,l_col1),lda,info)