Commit 72921d43 authored by Serhiy Mochalskyy

First commit
program MPIIO_helloworld
  use mpi
  implicit none
  integer(mpi_offset_kind) :: offset
  integer, dimension(mpi_status_size) :: wstatus
  integer, parameter :: msgsize = 6
  real*8, dimension(msgsize) :: arr
  real*8 :: number
  character(msgsize) :: message
  integer :: ierr, rank, comsize, fileno
  integer :: i

  call MPI_Init(ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, comsize, ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)

  ! Fill a small per-rank array with distinguishable values
  do i = 1, msgsize
     arr(i) = rank*10 + i
  enddo
  if (mod(rank,2) == 0) then
     message = "Hello "
  else
     message = "World!"
  endif

  ! Debug output: one Fortran unit per rank
  write(500+rank,*) arr(:)

  ! Each rank writes one 8-byte double at its own file offset
  offset = rank*8
  if (rank == 0) number = arr(1)
  if (rank == 1) number = arr(2)
  if (rank >  1) number = arr(3)

  call MPI_File_open(MPI_COMM_WORLD, "helloworld.txt", &
       ior(MPI_MODE_CREATE, MPI_MODE_WRONLY), &
       MPI_INFO_NULL, fileno, ierr)
  call MPI_File_seek(fileno, offset, MPI_SEEK_SET, ierr)
  call MPI_File_write(fileno, number, 1, MPI_DOUBLE_PRECISION, &
       wstatus, ierr)
  call MPI_File_close(fileno, ierr)
  call MPI_Finalize(ierr)
end program MPIIO_helloworld
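The seek-then-write pair above can also be expressed as a single explicit-offset call. A minimal sketch, reusing fileno, offset, number and wstatus from the program above: MPI_File_write_at carries the offset itself, and its collective variant MPI_File_write_at_all lets the MPI-IO layer aggregate the ranks' accesses instead of servicing each write independently.

! Sketch: explicit-offset write, equivalent to the seek+write above
call MPI_File_write_at(fileno, offset, number, 1, &
     MPI_DOUBLE_PRECISION, wstatus, ierr)
! Collective variant (must be called by every rank in the communicator):
! call MPI_File_write_at_all(fileno, offset, number, 1, &
!      MPI_DOUBLE_PRECISION, wstatus, ierr)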
cp: cannot stat ‘boundary.txt’: No such file or directory
cp: cannot stat ‘coil.txt’: No such file or directory
cp: cannot stat ‘input’: No such file or directory
(the same three cp errors repeat for each job submission: the files staged by the submit script were not present in $PBS_O_WORKDIR)
=>> PBS: job killed: walltime 6901 exceeded limit 6900
forrtl: error (78): process killed (SIGTERM)
Image PC Routine Line Source
parallel_IO.out 00000000004A6831 Unknown Unknown Unknown
parallel_IO.out 00000000004A496B Unknown Unknown Unknown
parallel_IO.out 0000000000471284 Unknown Unknown Unknown
parallel_IO.out 0000000000471096 Unknown Unknown Unknown
parallel_IO.out 0000000000450879 Unknown Unknown Unknown
parallel_IO.out 000000000042A35C Unknown Unknown Unknown
libpthread-2.17.s 00002B767871B100 Unknown Unknown Unknown
libpthread-2.17.s 00002B767871A3A4 __fcntl Unknown Unknown
libmpi.so.12 00002B76790557F3 ADIOI_Set_lock Unknown Unknown
libmpi.so.12.0 00002B7678DA6C26 Unknown Unknown Unknown
libmpi.so.12 00002B76792675B7 MPI_File_write Unknown Unknown
libmpifort.so.12. 00002B76789BE35B MPI_FILE_WRITE Unknown Unknown
parallel_IO.out 00000000004058EA Unknown Unknown Unknown
parallel_IO.out 0000000000404FDE Unknown Unknown Unknown
libc-2.17.so 00002B767A117B15 __libc_start_main Unknown Unknown
parallel_IO.out 0000000000404EE9 Unknown Unknown Unknown
(five further near-identical forrtl error (78) tracebacks from the other MPI ranks omitted; all of them end in __fcntl -> ADIOI_Set_lock -> MPI_File_write)
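Each traceback ends in the same place: MPI_FILE_WRITE -> ADIOI_Set_lock -> __fcntl. The job did not crash on its own; it hung in fcntl byte-range locking inside ROMIO's write path until PBS killed it at the walltime limit (6901 s > 6900 s). ROMIO takes these locks for its data-sieving write optimization, and on file systems where fcntl locks are slow or unsupported the lock call can stall indefinitely. A minimal sketch of one commonly used workaround, assuming Intel MPI's ROMIO-based MPI-IO layer: disable data sieving for writes via the romio_ds_write hint and pass the info object to MPI_File_open. Whether this helps depends on the underlying file system.

! Sketch: turn off ROMIO data sieving for writes so independent
! MPI_File_write calls need not take fcntl byte-range locks
integer :: info
call MPI_Info_create(info, ier)
call MPI_Info_set(info, "romio_ds_write", "disable", ier)
call MPI_FILE_OPEN(MPI_COMM_WORLD, 'testfile1.txt', &
     MPI_MODE_WRONLY + MPI_MODE_CREATE, &
     info, thefile, ier)
call MPI_Info_free(info, ier)

=======================================================================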
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=04:55:00
#PBS -l select=1:ncpus=36:mpiprocs=36:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
Total wall clock time including output= 3.43932010670503 hours
=======================================================================
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=04:55:00
#PBS -l select=1:ncpus=36:mpiprocs=36:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
Total wall clock time including output= 0.373999470538563 hours
=======================================================================
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=04:55:00
#PBS -l select=1:ncpus=36:mpiprocs=6:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
Total wall clock time including output= 0.117738883031739 hours
=======================================================================
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=01:55:00
#PBS -l select=1:ncpus=36:mpiprocs=2:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
Total wall clock time = 0.370842863917351 hours
=======================================================================
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=02:55:00
#PBS -l select=1:ncpus=36:mpiprocs=6:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO_64.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL_64,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
=======================================================================
#!/bin/bash
#PBS -q xfuaprod
#PBS -l walltime=01:55:00
#PBS -l select=1:ncpus=36:mpiprocs=6:mem=100GB
#PBS -j oe
#PBS -A FUSIO_HLST
#PBS -N MPI_IO
#PBS -m ae
#PBS -M serhiy.mochalskyy@ipp.mpg.de
cd $PBS_O_WORKDIR
FOLDER_NAME=${PBS_JOBNAME}_${PBS_JOBID}
BINARY=parallel_IO.out
mkdir $FOLDER_NAME
cp {boundary.txt,coil.txt,input,submit_script_marconi_STARWALL,$BINARY} $FOLDER_NAME
#cp {input,submit_script_marconi,$BINARY} $FOLDER_NAME
cd $FOLDER_NAME
module load intel intelmpi mkl
#export FOR_PRINT=ok.out
#export FOR_PRINT=/dev/null
#mpirun -genvlist FOR_PRINT ./STARWALL_JOREK_Linux>input
#cat /proc/sys/kernel/pid_max
mpirun ./$BINARY>my_output
module sca
  implicit none
  integer :: MYPNUM, NPROCS, CONTEXT, NPROW, NPCOL, MYCOL, MYROW, NB, MP_A, NQ_A, LDA_A
  integer :: ipc, ipr
  integer :: LDA_wp, LDA_pwe, LDA_pp, LDA_ep, LDA_ew, LDA_pwe_s, &
             LDA_we, LDA_ee, LDA_ww, LDA_rw, LDA_sww, LDA_s_ww_inv
  integer :: LDA_ey, LDA_ye, LDA_dee
  integer :: DESCA(9), DESCB(9), DESCZ(9), DESCC(9)
  integer :: DESC_ye(9)
  integer :: INFO, INFO_A, INFO_B, INFO_Z, INFO_C
  real :: ORFAC
  integer :: lwork_cooficient
end module sca
program parallel_IO
  use sca
  use mpi
  implicit none
  integer :: rank, numtasks, ERRORCODE, ier
  integer :: sqrtnp, step
  integer :: ntri_p_loc_b, ntri_p_loc_e, ntri_w_loc_b, ntri_w_loc_e
  real :: time1, time2, time3
  integer :: n_matrix_row, n_matrix_col
  real*8, dimension(:,:), allocatable :: arr, arr_loc, arr_glo_print
  integer :: i, j, ierr
  logical :: inside_i, inside_j
  integer :: i_loc, j_loc
  integer :: IC, JC
  integer, dimension(mpi_status_size) :: wstatus
  integer, dimension(MPI_STATUS_SIZE) :: status
  integer(kind=MPI_OFFSET_KIND) :: offset, empty
  integer :: size
  ! ScaLAPACK tool routines: local row/column counts and
  ! local-to-global index translation
  integer :: NUMROC
  EXTERNAL NUMROC
  integer :: INDXL2G
  EXTERNAL INDXL2G
  integer :: thefile
  !=====================================
  call MPI_INIT(ier)
  if (ier .ne. MPI_SUCCESS) then
     print *, 'Error starting MPI program. Terminating!'
     call MPI_ABORT(MPI_COMM_WORLD, ERRORCODE, ier)
  endif
  call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ier)
  call MPI_COMM_SIZE(MPI_COMM_WORLD, numtasks, ier)

  ! In order to measure total wallclock time
  call MPI_BARRIER(MPI_COMM_WORLD, ier)
  time1 = MPI_WTIME()

  ! Choose the most square NPROW x NPCOL grid that factors numtasks
  sqrtnp = int(sqrt(real(numtasks)) + 1)
  do i = 1, sqrtnp
     if (mod(numtasks,i) .eq. 0) NPCOL = i
  enddo
  NPROW = numtasks / NPCOL

  ! Check that the number of processes matches the processor grid
  CALL BLACS_PINFO(MYPNUM, NPROCS)
  CALL BLACS_GET(-1, 0, CONTEXT)
  IF (NPROCS /= NPROW * NPCOL) THEN
     WRITE(*,*) 'Error: number of processes does not match the grid: NPROW=', NPROW, &
          ' NPCOL=', NPCOL, ' NPROCS=', NPROCS
     STOP
  END IF
  CALL BLACS_GRIDINIT(CONTEXT, 'R', NPROW, NPCOL)
  CALL BLACS_GRIDINFO(CONTEXT, NPROW, NPCOL, MYROW, MYCOL)

  size = 9
  n_matrix_row = size
  n_matrix_col = size
  allocate(Arr(n_matrix_row,n_matrix_col), arr_glo_print(n_matrix_row,n_matrix_col), stat=ier)
  IF (IER /= 0) THEN
     WRITE (*,*) "cannot allocate global matrix"
     STOP
  END IF

  ! Fill the global matrix with distinguishable values
  do i = 1, n_matrix_row
     do j = 1, n_matrix_col
        Arr(i,j) = i*10.0 + j
     enddo
  enddo
! if(rank==0) then
! do i=1,n_matrix_row
! do j=1, n_matrix_col
! write(*,*) Arr(i,j)
! enddo
! enddo
! endif
! write(*,*) Arr(:,:)
  ! Block-cyclic distribution with block size NB: NUMROC returns the
  ! number of local rows/columns owned by this process
  NB = 2
  MP_A = NUMROC(n_matrix_row, NB, MYROW, 0, NPROW)
  NQ_A = NUMROC(n_matrix_col, NB, MYCOL, 0, NPCOL)
  LDA_A = MAX(1, MP_A)
  allocate(Arr_loc(MP_A,NQ_A), stat=ier)
  IF (IER /= 0) THEN
     WRITE (*,*) "cannot allocate local matrix a_pp: MY_PROC_NUM=", MYPNUM
     STOP
  END IF
  ! write(*,*) rank, " ", MP_A, NQ_A
  ! Scatter the global matrix into the local block-cyclic pieces
  do i = 1, size
     do j = 1, size
        call ScaLAPACK_mapping_i(i, i_loc, inside_i)
        if (inside_i) then
           call ScaLAPACK_mapping_j(j, j_loc, inside_j)
           if (inside_j) then
              arr_loc(i_loc,j_loc) = Arr(i,j)
           endif
        endif
     enddo
  enddo
! do i=1,MP_A
! do j=1, NQ_A
! write(100+rank,*) i,j,Arr_loc(i,j)
! enddo
! enddo
!==========================================Print
! DO i_loc = 1,MP_A
! IC= INDXL2G( i_loc, NB, MYROW, 0, NPROW)
! DO j_loc = 1,NQ_A
! JC= INDXL2G( j_loc, NB, MYCOL, 0, NPCOL)
! arr_glo_print(IC,JC) = arr_loc(i_loc,j_loc)
! END DO
! END DO
! if(rank==0) then
! call MPI_REDUCE(MPI_IN_PLACE, arr_glo_print, n_matrix_row*n_matrix_col, &
! MPI_DOUBLE_PRECISION, MPI_SUM, 0, MPI_COMM_WORLD, IER)
! else
! call MPI_REDUCE(arr_glo_print, arr_glo_print, n_matrix_row*n_matrix_col, &
! MPI_DOUBLE_PRECISION, MPI_SUM, 0, MPI_COMM_WORLD, IER)
!
! endif
! if(rank==0) then
! write(200,*) arr_glo_print(:,:)
! endif
!=======================================================
  call MPI_FILE_OPEN(MPI_COMM_WORLD, 'testfile1.txt', &
       MPI_MODE_WRONLY + MPI_MODE_CREATE, &
       MPI_INFO_NULL, thefile, ier)
  call MPI_BARRIER(MPI_COMM_WORLD, ier)
  offset = 0
  time1 = MPI_WTIME()

  ! Element-by-element write: translate each local element to its
  ! global position (IC,JC) and write one 8-byte double at the
  ! corresponding row-major file offset
  DO i_loc = 1, MP_A
     IC = INDXL2G(i_loc, NB, MYROW, 0, NPROW)
     DO j_loc = 1, NQ_A
        JC = INDXL2G(j_loc, NB, MYCOL, 0, NPCOL)
        offset = ((IC-1)*n_matrix_col + JC - 1)*8
        call MPI_File_seek(thefile, offset, MPI_SEEK_SET, ierr)
        call MPI_File_write(thefile, arr_loc(i_loc,j_loc), 1, MPI_DOUBLE_PRECISION, &
             status, ierr)
     END DO
  END DO
  call MPI_FILE_CLOSE(thefile, ier)
  time2 = MPI_WTIME()

  if (rank == 0) write(*,*) 'Total wall clock time =', (time2-time1)/3600.0, ' hours'
  if (rank == 0) write(*,*) '======================================================================='

  !call MPI_File_seek (thefile, offset, MPI_SEEK_SET, ierr)
  !call MPI_File_write(thefile, arr_loc(1,1), 1, MPI_DOUBLE_PRECISION, &
  !     status, ierr)
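The loop above issues MP_A*NQ_A independent seek/write pairs per process, and each MPI_File_write can take the ADIOI_Set_lock path seen in the tracebacks earlier in this log. A minimal sketch of the usual alternative, assuming the same grid, block size NB and local array as above (darray, gsizes, distribs, dargs and psizes are helper variables introduced only for this sketch): describe the block-cyclic layout once with MPI_Type_create_darray, install it as the file view, and write the whole local array with one collective call. Note that this view stores the matrix in Fortran (column-major) order in the file, whereas the loop above used row-major offsets.

! Sketch: single collective write of the block-cyclic matrix.
! MPI_Type_create_darray assumes a row-major process grid, which
! matches BLACS_GRIDINIT(CONTEXT, 'R', NPROW, NPCOL) above.
integer :: darray, gsizes(2), distribs(2), dargs(2), psizes(2)
gsizes   = (/ n_matrix_row, n_matrix_col /)
distribs = (/ MPI_DISTRIBUTE_CYCLIC, MPI_DISTRIBUTE_CYCLIC /)
dargs    = (/ NB, NB /)     ! ScaLAPACK block size in each dimension
psizes   = (/ NPROW, NPCOL /)
call MPI_Type_create_darray(numtasks, rank, 2, gsizes, distribs, dargs, &
     psizes, MPI_ORDER_FORTRAN, MPI_DOUBLE_PRECISION, darray, ier)
call MPI_Type_commit(darray, ier)
call MPI_File_set_view(thefile, 0_MPI_OFFSET_KIND, MPI_DOUBLE_PRECISION, &
     darray, 'native', MPI_INFO_NULL, ier)
call MPI_File_write_all(thefile, arr_loc, MP_A*NQ_A, MPI_DOUBLE_PRECISION, &
     status, ier)
call MPI_Type_free(darray, ier)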