Documentation and interfaces for publicly available functions

.TH "elpa_cholesky_complex" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_cholesky_complex \- Cholesky factorization of a complex hermitian matrix
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_cholesky_complex\fP (na, a(lda,matrixCols), lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "complex*16, intent(inout) \fBa\fP: locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
#include <complex.h>
.br
.RI "\fBint\fP success = \fBelpa_cholesky_complex\fP (\fBint\fP na, \fB double complex *\fPa, \fBint\fP lda, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "double complex *\fBa\fP: pointer to locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "int \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Does a Cholesky factorization of a complex, hermitian matrix. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBa\fP has global dimensions \fBna\fP x \fBna\fP, and a local size \fBlda\fP x \fBmatrixCols\fP.
.br
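.SH EXAMPLE
The following is a minimal sketch of a call from C, not taken from the ELPA sources. It assumes a block-cyclic distribution of \fBa\fP has already been set up (the variables na, lda, nblk, matrixCols, my_prow and my_pcol come from that setup, and a points to the local part of the matrix with its upper triangle filled), and that \fBget_elpa_communicators\fP(3) is called as documented in its own man page. Error handling is elided.
.br
.nf
#include <mpi.h>
#include <complex.h>
#include "elpa.h"

/* ELPA expects Fortran communicator handles, not MPI_Comm */
int fcomm = MPI_Comm_c2f(MPI_COMM_WORLD);
int mpi_comm_rows, mpi_comm_cols;
get_elpa_communicators(fcomm, my_prow, my_pcol,
                       &mpi_comm_rows, &mpi_comm_cols);

int success = elpa_cholesky_complex(na, a, lda, nblk, matrixCols,
                                    mpi_comm_rows, mpi_comm_cols, 1);
if (success != 1) {
   /* factorization failed */
}
.fi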
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
.TH "elpa_cholesky_real" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_cholesky_real \- Cholesky factorization of a real symmetric matrix
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_cholesky_real\fP (na, a(lda,matrixCols), lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "real*8, intent(inout) \fBa\fP: locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
.RI "\fBint\fP success = \fBelpa_cholesky_real\fP (\fBint\fP na, \fB double *\fPa, \fBint\fP lda, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "double *\fBa\fP: pointer to locally distributed part of the matrix \fBa\fP. The local dimensions are \fBlda\fP x \fBmatrixCols\fP"
.br
.RI "int \fBlda\fP: leading dimension of locally distributed matrix \fBa\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Does a Cholesky factorization of a real, symmetric matrix. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBa\fP has global dimensions \fBna\fP x \fBna\fP, and a local size \fBlda\fP x \fBmatrixCols\fP.
.br
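.SH EXAMPLE
The call sequence from C mirrors \fBelpa_cholesky_complex\fP(3). A minimal sketch, assuming the block-cyclic distribution and the ELPA communicators have already been set up via \fBget_elpa_communicators\fP(3), with a pointing to the local lda x matrixCols part of the real matrix:
.br
.nf
#include "elpa.h"

/* upper triangle of the distributed matrix a must be set;
   on success it holds the Cholesky factor */
int success = elpa_cholesky_real(na, a, lda, nblk, matrixCols,
                                 mpi_comm_rows, mpi_comm_cols, 1);
if (success != 1) {
   /* factorization failed */
}
.fi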
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
.TH "elpa_solve_tridi" 3 "Wed Jun 29 2016" "ELPA" \" -*- nroff -*-
.ad l
.nh
.SH NAME
elpa_solve_tridi \- Solve tridiagonal eigensystem with divide and conquer method
.br
.SH SYNOPSIS
.br
.SS FORTRAN INTERFACE
use elpa1
.br
.br
.RI "success = \fBelpa_solve_trid\fP (na, nev, d(na), e(na), q(ldq,matrixCols), ldq, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug)"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "integer, intent(in) \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "integer, intent(in) \fBnev\fP: number of eigenvalues/vectors to be computed"
.br
.RI "real*8, intent(inout) \fBd(na)\fP: array d(na) on input diagonal elements of tridiagonal matrix, on output the eigenvalues in ascending order"
.br
.RI "real*8, intent(in) \fBe(na)\fP: array e(na) on input subdiagonal elements of matrix, on exit destroyed"
.br
.RI "real*8, intent(inout) \fBq\fP: on exit \fBq\fP contains the eigenvectors. The local dimensions are \fBldq\fP x \fBmatrixCols\fP"
.br
.RI "integer, intent(in) \fBldq\fP: leading dimension of locally distributed matrix \fBq\fP"
.br
.RI "integer, intent(in) \fBnblk\fP: blocksize of cyclic distribution, must be the same in both directions"
.br
.RI "integer, intent(in) \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "integer, intent(in) \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "integer, intent(in) \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "logical, intent(in) \fBwantDebug\fP: if .true. , print more debug information in case of an error"
.RI "logical \fBsuccess\fP: return value indicating success or failure"
.br
.SS C INTERFACE
#include "elpa.h"
.br
.RI "\fBint\fP success = \fBelpa_solve_tridi\fP (\fBint\fP na, \fBint\fP nev, \fB double *\fPd,\fB double *\fPe ,\fB double *\fPq, \fBint\fP ldq, \fBint\fP nblk, \fBint\fP matrixCols, \fBint\fP mpi_comm_rows, \fBint\fP mpi_comm_cols, \fBint\fP wantDebug );"
.br
.RI " "
.br
.RI "With the definintions of the input and output variables:"
.br
.RI "int \fBna\fP: global dimension of quadratic matrix \fBa\fP to solve"
.br
.RI "int \fBnev\fP: number of eigenvalues/eigenvectors to be computed"
.br
.RI "double *\fBd\fP: pointer to array d(na) on input diagonal elements of tridiagonal matrix, on output the eigenvalues in ascending order"
.br
.RI "double *\fBe\fP: pointer to array e(na) on input subdiagonal elements of matrix, on exit destroyed"
.br
.RI "double *\fBq\fP: on exit \fBq\fP contains the eigenvectors. The local dimensions are \fBldq\fP x \fBmatrixCols\fP"
.br
.RI "int \fBldq\fP: leading dimension of locally distributed matrix \fBq\fP"
.br
.RI "int \fBnblk\fP: blocksize of block cyclic distributin, must be the same in both directions"
.br
.RI "int \fBmatrixCols\fP: number of columns of locally distributed matrices \fBa\fP and \fBq\fP"
.br
.RI "int \fBmpi_comm_rows\fP: communicator for communication in rows. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBmpi_comm_cols\fP: communicator for communication in colums. Constructed with \fBget_elpa_communicators\fP(3)"
.br
.RI "int \fBwantDebug\fP: if 1, print more debug information in case of an error"
.br
.RI "int \fBsuccess\fP: return value indicating success (1) or failure (0)
.SH DESCRIPTION
Solves a tridiagonal eigensystem and returns \fBnev\fP eigenvalues/eigenvectors. The ELPA communicators \fBmpi_comm_rows\fP and \fBmpi_comm_cols\fP are obtained with the \fBget_elpa_communicators\fP(3) function. The distributed quadratic matrix \fBq\fP has global dimensions \fBna\fP x \fBna\fP, and a local size \fBldq\fP x \fBmatrixCols\fP.
.br
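.SH EXAMPLE
A minimal sketch of a call from C, not taken from the ELPA sources. It assumes that d and e hold the diagonal and subdiagonal elements of the tridiagonal matrix on every process, that q points to the local ldq x matrixCols part of the distributed eigenvector matrix, and that the ELPA communicators were obtained with \fBget_elpa_communicators\fP(3):
.br
.nf
#include "elpa.h"

double d[na], e[na];  /* diagonal and subdiagonal, filled by caller */

int success = elpa_solve_tridi(na, nev, d, e, q, ldq, nblk, matrixCols,
                               mpi_comm_rows, mpi_comm_cols, 0);
if (success == 1) {
   /* d now holds the eigenvalues in ascending order;
      q holds the nev computed eigenvectors */
}
.fi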
.SH "SEE ALSO"
\fBget_elpa_communicators\fP(3)
@@ -100,16 +100,26 @@ module ELPA1
  ! imported from elpa1_auxiliary
  public :: elpa_mult_at_b_real     !< Multiply real matrices A**T * B
  public :: mult_at_b_real          !< old, deprecated interface to multiply real matrices A**T * B
  public :: elpa_mult_ah_b_complex  !< Multiply complex matrices A**H * B
  public :: mult_ah_b_complex       !< old, deprecated interface to multiply complex matrices A**H * B
  public :: elpa_invert_trm_real    !< Invert real triangular matrix
  public :: invert_trm_real         !< old, deprecated interface to invert real triangular matrix
  public :: elpa_invert_trm_complex !< Invert complex triangular matrix
  public :: invert_trm_complex      !< old, deprecated interface to invert complex triangular matrix
  public :: elpa_cholesky_real      !< Cholesky factorization of a real matrix
  public :: cholesky_real           !< old, deprecated interface to do Cholesky factorization of a real matrix
  public :: elpa_cholesky_complex   !< Cholesky factorization of a complex matrix
  public :: cholesky_complex        !< old, deprecated interface to do Cholesky factorization of a complex matrix
  public :: elpa_solve_tridi        !< Solve tridiagonal eigensystem with divide and conquer method
  public :: solve_tridi             !< Solve tridiagonal eigensystem with divide and conquer method
! Timing results, set by every call to solve_evp_xxx
@@ -309,3 +309,361 @@
end function
!c> /*
!c> \brief C interface to solve tridiagonal eigensystem with divide and conquer method
!c> \details
!c>
!c> \param na Matrix dimension
!c> \param nev number of eigenvalues/vectors to be computed
!c> \param d array d(na) on input diagonal elements of tridiagonal matrix, on
!c> output the eigenvalues in ascending order
!c> \param e array e(na) on input subdiagonal elements of matrix, on exit destroyed
!c> \param q on exit : matrix q(ldq,matrixCols) contains the eigenvectors
!c> \param ldq leading dimension of matrix q
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param matrixCols columns of matrix q
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param wantDebug give more debug information if 1, else 0
!c> \result success int 1 on success, else 0
!c> */
!c> int elpa_solve_tridi(int na, int nev, double *d, double *e, double *q, int ldq, int nblk, int matrixCols, int mpi_comm_rows, int mpi_comm_cols, int wantDebug);
  function elpa_solve_tridi_wrapper(na, nev, d, e, q, ldq, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) &
           result(success) bind(C,name="elpa_solve_tridi")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_solve_tridi
    implicit none
    integer(kind=c_int)                    :: success
    integer(kind=c_int), value, intent(in) :: na, nev, ldq, nblk, matrixCols, mpi_comm_cols, mpi_comm_rows
    integer(kind=c_int), value             :: wantDebug
    real(kind=c_double)                    :: d(1:na), e(1:na), q(1:ldq, 1:matrixCols)
    logical                                :: successFortran, wantDebugFortran

    if (wantDebug .ne. 0) then
      wantDebugFortran = .true.
    else
      wantDebugFortran = .false.
    endif

    successFortran = elpa_solve_tridi(na, nev, d, e, q, ldq, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebugFortran)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface for elpa_mult_at_b_real: Performs C := A**T * B
!c>        where A is a square matrix (na,na) which is optionally upper or lower triangular
!c>        B is a (na,ncb) matrix
!c>        C is a (na,ncb) matrix where optionally only the upper or lower
!c>        triangle may be computed
!c> \details
!c> \param uplo_a       'U' if A is upper triangular
!c>                     'L' if A is lower triangular
!c>                     anything else if A is a full matrix
!c>                     Please note: This pertains to the original A (as set in the calling program)
!c>                     whereas the transpose of A is used for calculations
!c>                     If uplo_a is 'U' or 'L', the other triangle is not used at all,
!c>                     i.e. it may contain arbitrary numbers
!c> \param uplo_c       'U' if only the upper diagonal part of C is needed
!c>                     'L' if only the lower diagonal part of C is needed
!c>                     anything else if the full matrix C is needed
!c>                     Please note: Even when uplo_c is 'U' or 'L', the other triangle may be
!c>                     written to a certain extent, i.e. one shouldn't rely on the content there!
!c> \param na Number of rows/columns of A, number of rows of B and C
!c> \param ncb Number of columns of B and C
!c> \param a matrix a
!c> \param lda leading dimension of matrix a
!c> \param b matrix b
!c> \param ldb leading dimension of matrix b
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param c matrix c
!c> \param ldc leading dimension of matrix c
!c> \result success int report success (1) or failure (0)
!c> */
!c> int elpa_mult_at_b_real(char uplo_a, char uplo_c, int na, int ncb, double *a, int lda, double *b, int ldb, int nblk, int mpi_comm_rows, int mpi_comm_cols, double *c, int ldc);
  function elpa_mult_at_b_real_wrapper(uplo_a, uplo_c, na, ncb, a, lda, b, ldb, nblk, mpi_comm_rows, mpi_comm_cols, c, ldc) &
           bind(C,name="elpa_mult_at_b_real") result(success)
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_mult_at_b_real
    implicit none
    character(1,C_CHAR), value :: uplo_a, uplo_c
    integer(kind=c_int), value :: na, ncb, lda, ldb, nblk, mpi_comm_rows, mpi_comm_cols, ldc
    integer(kind=c_int)        :: success
    real(kind=c_double)        :: a(lda,*), b(ldb,*), c(ldc,*)
    logical                    :: successFortran

    successFortran = elpa_mult_at_b_real(uplo_a, uplo_c, na, ncb, a, lda, b, ldb, nblk, mpi_comm_rows, mpi_comm_cols, c, ldc)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface for elpa_mult_ah_b_complex: Performs C := A**H * B
!c>        where A is a square matrix (na,na) which is optionally upper or lower triangular
!c>        B is a (na,ncb) matrix
!c>        C is a (na,ncb) matrix where optionally only the upper or lower
!c>        triangle may be computed
!c> \details
!c>
!c> \param uplo_a       'U' if A is upper triangular
!c>                     'L' if A is lower triangular
!c>                     anything else if A is a full matrix
!c>                     Please note: This pertains to the original A (as set in the calling program)
!c>                     whereas the conjugate transpose of A is used for calculations
!c>                     If uplo_a is 'U' or 'L', the other triangle is not used at all,
!c>                     i.e. it may contain arbitrary numbers
!c> \param uplo_c       'U' if only the upper diagonal part of C is needed
!c>                     'L' if only the lower diagonal part of C is needed
!c>                     anything else if the full matrix C is needed
!c>                     Please note: Even when uplo_c is 'U' or 'L', the other triangle may be
!c>                     written to a certain extent, i.e. one shouldn't rely on the content there!
!c> \param na Number of rows/columns of A, number of rows of B and C
!c> \param ncb Number of columns of B and C
!c> \param a matrix a
!c> \param lda leading dimension of matrix a
!c> \param b matrix b
!c> \param ldb leading dimension of matrix b
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param c matrix c
!c> \param ldc leading dimension of matrix c
!c> \result success int reports success (1) or failure (0)
!c> */
!c> int elpa_mult_ah_b_complex(char uplo_a, char uplo_c, int na, int ncb, double complex *a, int lda, double complex *b, int ldb, int nblk, int mpi_comm_rows, int mpi_comm_cols, double complex *c, int ldc);
  function elpa_mult_ah_b_complex_wrapper(uplo_a, uplo_c, na, ncb, a, lda, b, ldb, nblk, mpi_comm_rows, mpi_comm_cols, c, ldc) &
           result(success) bind(C,name="elpa_mult_ah_b_complex")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_mult_ah_b_complex
    implicit none
    character(1,C_CHAR), value    :: uplo_a, uplo_c
    integer(kind=c_int), value    :: na, ncb, lda, ldb, nblk, mpi_comm_rows, mpi_comm_cols, ldc
    integer(kind=c_int)           :: success
    complex(kind=c_double_complex) :: a(lda,*), b(ldb,*), c(ldc,*)
    logical                        :: successFortran

    successFortran = elpa_mult_ah_b_complex(uplo_a, uplo_c, na, ncb, a, lda, b, ldb, nblk, mpi_comm_rows, mpi_comm_cols, c, ldc)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface to elpa_invert_trm_real: Inverts a real upper triangular matrix
!c> \details
!c> \param na Order of matrix
!c> \param  a(lda,matrixCols) Distributed matrix which should be inverted.
!c>                           Distribution is like in ScaLAPACK.
!c>                           Only the upper triangle needs to be set.
!c>                           The lower triangle is not referenced.
!c> \param lda Leading dimension of a
!c> \param matrixCols local columns of matrix a
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param wantDebug int more debug information on failure if 1, else 0
!c> \result success       int reports success (1) or failure (0)
!c> */
!c> int elpa_invert_trm_real(int na, double *a, int lda, int nblk, int matrixCols, int mpi_comm_rows, int mpi_comm_cols, int wantDebug);
  function elpa_invert_trm_real_wrapper(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) &
           result(success) bind(C,name="elpa_invert_trm_real")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_invert_trm_real
    implicit none
    integer(kind=c_int), value :: na, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols
    integer(kind=c_int), value :: wantDebug
    integer(kind=c_int)        :: success
    real(kind=c_double)        :: a(lda,matrixCols)
    logical                    :: wantDebugFortran, successFortran

    if (wantDebug .ne. 0) then
      wantDebugFortran = .true.
    else
      wantDebugFortran = .false.
    endif

    successFortran = elpa_invert_trm_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebugFortran)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface to elpa_invert_trm_complex: Inverts a complex upper triangular matrix
!c> \details
!c> \param na Order of matrix
!c> \param  a(lda,matrixCols) Distributed matrix which should be inverted.
!c>                           Distribution is like in ScaLAPACK.
!c>                           Only the upper triangle needs to be set.
!c>                           The lower triangle is not referenced.
!c> \param lda Leading dimension of a
!c> \param matrixCols local columns of matrix a
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param wantDebug int more debug information on failure if 1, else 0
!c> \result success       int reports success (1) or failure (0)
!c> */
!c> int elpa_invert_trm_complex(int na, double complex *a, int lda, int nblk, int matrixCols, int mpi_comm_rows, int mpi_comm_cols, int wantDebug);
  function elpa_invert_trm_complex_wrapper(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) result(success) &
           bind(C,name="elpa_invert_trm_complex")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_invert_trm_complex
    implicit none
    integer(kind=c_int), value    :: na, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols
    integer(kind=c_int), value    :: wantDebug
    integer(kind=c_int)           :: success
    complex(kind=c_double_complex) :: a(lda, matrixCols)
    logical                        :: successFortran, wantDebugFortran

    if (wantDebug .ne. 0) then
      wantDebugFortran = .true.
    else
      wantDebugFortran = .false.
    endif

    successFortran = elpa_invert_trm_complex(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebugFortran)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface to elpa_cholesky_real: Cholesky factorization of a real symmetric matrix
!c> \details
!c>
!c> \param na Order of matrix
!c> \param  a(lda,matrixCols) Distributed matrix which should be factorized.
!c>                           Distribution is like in ScaLAPACK.
!c>                           Only the upper triangle needs to be set.
!c>                           On return, the upper triangle contains the Cholesky factor
!c>                           and the lower triangle is set to 0.
!c> \param lda Leading dimension of a
!c> \param matrixCols local columns of matrix a
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param wantDebug int more debug information on failure if 1, else 0
!c> \result success       int reports success (1) or failure (0)
!c> */
!c> int elpa_cholesky_real(int na, double *a, int lda, int nblk, int matrixCols, int mpi_comm_rows, int mpi_comm_cols, int wantDebug);
  function elpa_cholesky_real_wrapper(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) result(success) &
           bind(C,name="elpa_cholesky_real")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_cholesky_real
    implicit none
    integer(kind=c_int), value :: na, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug
    integer(kind=c_int)        :: success
    real(kind=c_double)        :: a(lda,matrixCols)
    logical                    :: successFortran, wantDebugFortran

    if (wantDebug .ne. 0) then
      wantDebugFortran = .true.
    else
      wantDebugFortran = .false.
    endif

    successFortran = elpa_cholesky_real(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebugFortran)

    if (successFortran) then
      success = 1
    else
      success = 0
    endif
  end function
!c> /*
!c> \brief C interface to elpa_cholesky_complex: Cholesky factorization of a complex hermitian matrix
!c> \details
!c> \param na Order of matrix
!c> \param  a(lda,matrixCols) Distributed matrix which should be factorized.
!c>                           Distribution is like in ScaLAPACK.
!c>                           Only the upper triangle needs to be set.
!c>                           On return, the upper triangle contains the Cholesky factor
!c>                           and the lower triangle is set to 0.
!c> \param lda Leading dimension of a
!c> \param matrixCols local columns of matrix a
!c> \param nblk blocksize of cyclic distribution, must be the same in both directions!
!c> \param mpi_comm_rows MPI communicator for rows
!c> \param mpi_comm_cols MPI communicator for columns
!c> \param wantDebug     int more debug information on failure if 1, else 0
!c> \result success       int reports success (1) or failure (0)
!c> */
!c> int elpa_cholesky_complex(int na, double complex *a, int lda, int nblk, int matrixCols, int mpi_comm_rows, int mpi_comm_cols, int wantDebug);
  function elpa_cholesky_complex_wrapper(na, a, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug) result(success) &
           bind(C,name="elpa_cholesky_complex")
    use, intrinsic :: iso_c_binding
    use elpa1_auxiliary, only : elpa_cholesky_complex
    implicit none
    integer(kind=c_int), value :: na, lda, nblk, matrixCols, mpi_comm_rows, mpi_comm_cols, wantDebug
    integer(kind=c_int)        :: success
    complex(kind=c_double_complex) :: a(lda,matrixCols)