TABLE OF CONTENTS
- ABINIT/m_libpaw_mpi
- ABINIT/xpaw_mpi_allgather_dp1d
- ABINIT/xpaw_mpi_allgather_dp2d
- ABINIT/xpaw_mpi_allgather_dp3d
- ABINIT/xpaw_mpi_allgather_int1d
- ABINIT/xpaw_mpi_allgatherv_dp1d
- ABINIT/xpaw_mpi_allgatherv_dp2d
- ABINIT/xpaw_mpi_allgatherv_int1d
- ABINIT/xpaw_mpi_alltoall_dp1d
- ABINIT/xpaw_mpi_alltoall_dp2d
- ABINIT/xpaw_mpi_alltoall_int1d
- ABINIT/xpaw_mpi_alltoallv_dp1d
- ABINIT/xpaw_mpi_alltoallv_dp2d
- ABINIT/xpaw_mpi_alltoallv_int1d
- ABINIT/xpaw_mpi_bcast_dp1d
- ABINIT/xpaw_mpi_bcast_dp2d
- ABINIT/xpaw_mpi_bcast_dp3d
- ABINIT/xpaw_mpi_bcast_int
- ABINIT/xpaw_mpi_bcast_int1d
- ABINIT/xpaw_mpi_exch_dp1d
- ABINIT/xpaw_mpi_exch_dp2d
- ABINIT/xpaw_mpi_exch_dp3d
- ABINIT/xpaw_mpi_exch_int1d
- ABINIT/xpaw_mpi_gather_dp1d
- ABINIT/xpaw_mpi_gather_dp2d
- ABINIT/xpaw_mpi_gather_int1d
- ABINIT/xpaw_mpi_gatherv_dp1d
- ABINIT/xpaw_mpi_gatherv_dp2d
- ABINIT/xpaw_mpi_gatherv_int1d
- ABINIT/xpaw_mpi_get_tag_ub
- ABINIT/xpaw_mpi_irecv_dp1d
- ABINIT/xpaw_mpi_irecv_dp2d
- ABINIT/xpaw_mpi_irecv_int1d
- ABINIT/xpaw_mpi_isend_dp1d
- ABINIT/xpaw_mpi_isend_dp2d
- ABINIT/xpaw_mpi_isend_int1d
- ABINIT/xpaw_mpi_recv_dp1d
- ABINIT/xpaw_mpi_recv_dp2d
- ABINIT/xpaw_mpi_recv_dp3d
- ABINIT/xpaw_mpi_recv_int1d
- ABINIT/xpaw_mpi_scatterv_dp1d
- ABINIT/xpaw_mpi_scatterv_dp2d
- ABINIT/xpaw_mpi_scatterv_int1d
- ABINIT/xpaw_mpi_send_dp1d
- ABINIT/xpaw_mpi_send_dp2d
- ABINIT/xpaw_mpi_send_dp3d
- ABINIT/xpaw_mpi_send_int1d
- ABINIT/xpaw_mpi_sum_dp1d
- ABINIT/xpaw_mpi_sum_dp2d
- ABINIT/xpaw_mpi_sum_dp3d
- ABINIT/xpaw_mpi_sum_int
- ABINIT/xpaw_mpi_sum_int1d
- m_libpaw_mpi/xpaw_mpi_abort
- m_libpaw_mpi/xpaw_mpi_barrier
- m_libpaw_mpi/xpaw_mpi_comm_rank
- m_libpaw_mpi/xpaw_mpi_comm_size
- m_libpaw_mpi/xpaw_mpi_iprobe
- m_libpaw_mpi/xpaw_mpi_wait
- m_libpaw_mpi/xpaw_mpi_waitall
ABINIT/m_libpaw_mpi [ Modules ]
NAME
m_libpaw_mpi
FUNCTION
libPAW wrappers for the MPI library. Provides MPI named constants and tools, as well as a set of generic interfaces wrapping the MPI primitives.
COPYRIGHT
Copyright (C) 2014-2024 ABINIT group (MT, MG, ...) This file is distributed under the terms of the GNU General Public License, see ~abinit/COPYING or http://www.gnu.org/copyleft/gpl.txt .
NOTES
This file comes directly from the m_xmpi.F90 module delivered with ABINIT. FOR DEVELOPERS: in order to preserve the portability of the libPAW library, please consult ~abinit/src/??_libpaw/libpaw-coding-rules.txt
SOURCE
#include "libpaw.h"

module m_libpaw_mpi

 USE_DEFS

#ifdef HAVE_MPI2
 use mpi
#endif

 implicit none

 private
ABINIT/xpaw_mpi_allgather_dp1d [ Functions ]
NAME
xpaw_mpi_allgather_dp1d
FUNCTION
MPI_ALLGATHER for 1D double precision arrays
INPUTS
xval= buffer array
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
SOURCE
subroutine xpaw_mpi_allgather_dp1d(xval,nelem,recvbuf,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf(1:nelem)=xval(1:nelem)
 end if
#else
 recvbuf(1:nelem)=xval(1:nelem)
#endif
end subroutine xpaw_mpi_allgather_dp1d
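A hypothetical usage sketch (not part of the source): every rank contributes nelem values and each rank receives the nproc*nelem concatenation. It assumes the specific name is exported (the module's public list is not shown in this excerpt) and that xpaw_mpi_comm_size(comm) returns the communicator size, as the table of contents suggests; dp is defined locally to keep the sketch self-contained.

subroutine demo_allgather_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)  ! stand-in for the dp kind USE_DEFS provides
 integer, intent(in) :: comm           ! any valid MPI communicator
 integer :: nproc,ier
 real(dp) :: mine(2)
 real(dp), allocatable :: gathered(:)
 nproc=xpaw_mpi_comm_size(comm)
 mine=(/1.0_dp,2.0_dp/)
 allocate(gathered(2*nproc))           ! recvbuf must hold nelem values per rank
 call xpaw_mpi_allgather_dp1d(mine,2,gathered,comm,ier)
 deallocate(gathered)
end subroutine demo_allgather_dp1d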
ABINIT/xpaw_mpi_allgather_dp2d [ Functions ]
NAME
xpaw_mpi_allgather_dp2d
FUNCTION
MPI_ALLGATHER for 2D double precision arrays
INPUTS
xval= buffer array
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
SOURCE
subroutine xpaw_mpi_allgather_dp2d(xval,nelem,recvbuf,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf(:,:)=xval(:,:)
 end if
#else
 recvbuf(:,:)=xval(:,:)
#endif
end subroutine xpaw_mpi_allgather_dp2d
ABINIT/xpaw_mpi_allgather_dp3d [ Functions ]
NAME
xpaw_mpi_allgather_dp3d
FUNCTION
MPI_ALLGATHER for 3D double precision arrays
INPUTS
xval= buffer array
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
SOURCE
subroutine xpaw_mpi_allgather_dp3d(xval,nelem,recvbuf,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:,:)
 real(dp), intent(inout) :: recvbuf(:,:,:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHER(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,nelem,MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf(:,:,:)=xval(:,:,:)
 end if
#else
 recvbuf(:,:,:)=xval(:,:,:)
#endif
end subroutine xpaw_mpi_allgather_dp3d
ABINIT/xpaw_mpi_allgather_int1d [ Functions ]
NAME
xpaw_mpi_allgather_int1d
FUNCTION
MPI_ALLGATHER for 1D integer arrays
INPUTS
xval= buffer array
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received elements
SOURCE
subroutine xpaw_mpi_allgather_int1d(xval,nelem,recvbuf,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHER(xval,nelem,MPI_INTEGER,recvbuf,nelem,MPI_INTEGER,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf(1:nelem)=xval(1:nelem)
 end if
#else
 recvbuf(1:nelem)=xval(1:nelem)
#endif
end subroutine xpaw_mpi_allgather_int1d
ABINIT/xpaw_mpi_allgatherv_dp1d [ Functions ]
NAME
xpaw_mpi_allgatherv_dp1d
FUNCTION
MPI_ALLGATHERV for 1D double precision arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_allgatherv_dp1d(xval,nelem,recvbuf,recvcounts,displs,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
 integer :: cc,dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&   MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_allgatherv_dp1d
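A hypothetical sketch of the variable-count case, under the same assumptions as the earlier example: rank r contributes r+1 values, and recvcounts/displs describe where each rank's block lands in the shared result.

subroutine demo_allgatherv_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)  ! stand-in for the dp kind from USE_DEFS
 integer, intent(in) :: comm
 integer :: me,nproc,ier,ii
 integer, allocatable :: counts(:),displs(:)
 real(dp), allocatable :: mine(:),gathered(:)
 me=xpaw_mpi_comm_rank(comm) ; nproc=xpaw_mpi_comm_size(comm)
 allocate(mine(me+1)) ; mine=real(me,dp)       ! rank r contributes r+1 values
 allocate(counts(nproc),displs(nproc))
 counts=(/(ii,ii=1,nproc)/)                    ! counts(r+1)=r+1
 displs(1)=0
 do ii=2,nproc
   displs(ii)=displs(ii-1)+counts(ii-1)        ! cumulative offsets
 end do
 allocate(gathered(sum(counts)))
 call xpaw_mpi_allgatherv_dp1d(mine,me+1,gathered,counts,displs,comm,ier)
end subroutine demo_allgatherv_dp1d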
ABINIT/xpaw_mpi_allgatherv_dp2d [ Functions ]
NAME
xpaw_mpi_allgatherv_dp2d
FUNCTION
MPI_ALLGATHERV for 2D double precision arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_allgatherv_dp2d(xval,nelem,recvbuf,recvcounts,displs,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
 integer :: cc,dd,sz1

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHERV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&   MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   sz1=size(xval,1)
   dd=0;if (size(displs)>0) dd=displs(1)/sz1
   cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
   recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_allgatherv_dp2d
ABINIT/xpaw_mpi_allgatherv_int1d [ Functions ]
NAME
xpaw_mpi_allgatherv_int1d
FUNCTION
MPI_ALLGATHERV for 1D integer arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_allgatherv_int1d(xval,nelem,recvbuf,recvcounts,displs,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLGATHERV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
&   MPI_INTEGER,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_allgatherv_int1d
ABINIT/xpaw_mpi_alltoall_dp1d [ Functions ]
NAME
xpaw_mpi_alltoall_dp1d
FUNCTION
MPI_ALLTOALL for 1D double precision arrays
INPUTS
xval= buffer array
sendsize= size of sent buffer
recvsize= size of received buffer
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoall_dp1d(xval,sendsize,recvbuf,recvsize,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendsize,recvsize
 integer, intent(in) :: spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLTOALL(xval,sendsize,MPI_DOUBLE_PRECISION,recvbuf,&
&   recvsize,MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_alltoall_dp1d
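A hypothetical sketch, under the same assumptions as the earlier examples: every rank sends sendsize values to each rank and receives recvsize values from each, so both buffers are sized nproc times the block size.

subroutine demo_alltoall_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer :: nproc,ier
 real(dp), allocatable :: sendbuf(:),recvbuf(:)
 nproc=xpaw_mpi_comm_size(comm)
 allocate(sendbuf(3*nproc),recvbuf(3*nproc))   ! 3 values for/from each rank
 sendbuf=1.0_dp
 call xpaw_mpi_alltoall_dp1d(sendbuf,3,recvbuf,3,comm,ier)
end subroutine demo_alltoall_dp1d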
ABINIT/xpaw_mpi_alltoall_dp2d [ Functions ]
NAME
xpaw_mpi_alltoall_dp2d
FUNCTION
MPI_ALLTOALL for 2D double precision arrays
INPUTS
xval= buffer array
sendsize= size of sent buffer
recvsize= size of received buffer
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoall_dp2d(xval,sendsize,recvbuf,recvsize,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: sendsize,recvsize
 integer, intent(in) :: spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLTOALL(xval,sendsize,MPI_DOUBLE_PRECISION,recvbuf,&
&   recvsize,MPI_DOUBLE_PRECISION,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_alltoall_dp2d
ABINIT/xpaw_mpi_alltoall_int1d [ Functions ]
NAME
xpaw_mpi_alltoall_int1d
FUNCTION
MPI_ALLTOALL for 1D integer arrays
INPUTS
xval= buffer array
sendsize= size of sent buffer
recvsize= size of received buffer
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoall_int1d(xval,sendsize,recvbuf,recvsize,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendsize,recvsize
 integer, intent(in) :: spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_ALLTOALL(xval,sendsize,MPI_INTEGER,recvbuf,&
&   recvsize,MPI_INTEGER,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_alltoall_int1d
ABINIT/xpaw_mpi_alltoallv_dp1d [ Functions ]
NAME
xpaw_mpi_alltoallv_dp1d
FUNCTION
MPI_ALLTOALLV for 1D double precision arrays
INPUTS
xval= buffer array
sendcnts= number of elements to send to each processor
sdispls= displacements from which to take the outgoing data
recvcnts= number of elements that can be received from each processor
rdispls= displacement at which to place the incoming data from each processor
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoallv_dp1d(xval,sendcnts,sdispls,recvbuf,recvcnts,rdispls,comm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendcnts(:),sdispls(:),recvcnts(:),rdispls(:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: sc,sds,sdr

! *********************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_ALLTOALLV(xval,sendcnts,sdispls,MPI_DOUBLE_PRECISION,recvbuf,&
&   recvcnts,rdispls,MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == xpaw_mpi_comm_self) then
#endif
   sds=0;if (size(sdispls)>0) sds=sdispls(1)
   sdr=0;if (size(rdispls)>0) sdr=rdispls(1)
   sc=size(xval);if (size(sendcnts)>0) sc=sendcnts(1)
   recvbuf(sdr+1:sdr+sc)=xval(sds+1:sds+sc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_alltoallv_dp1d
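A hypothetical sketch with uniform counts (the interesting cases use per-rank counts, but the call pattern is identical); same assumptions as the earlier examples.

subroutine demo_alltoallv_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer :: nproc,ier,ii
 integer, allocatable :: cnts(:),dsp(:)
 real(dp), allocatable :: sendbuf(:),recvbuf(:)
 nproc=xpaw_mpi_comm_size(comm)
 allocate(cnts(nproc),dsp(nproc))
 cnts=2                                 ! 2 elements exchanged with each rank
 dsp=(/(2*(ii-1),ii=1,nproc)/)          ! contiguous blocks
 allocate(sendbuf(2*nproc),recvbuf(2*nproc))
 sendbuf=1.0_dp
 call xpaw_mpi_alltoallv_dp1d(sendbuf,cnts,dsp,recvbuf,cnts,dsp,comm,ier)
end subroutine demo_alltoallv_dp1d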
ABINIT/xpaw_mpi_alltoallv_dp2d [ Functions ]
NAME
xpaw_mpi_alltoallv_dp2d
FUNCTION
MPI_ALLTOALLV for 2D double precision arrays
INPUTS
xval= buffer array
sendcnts= number of elements to send to each processor
sdispls= displacements from which to take the outgoing data
recvcnts= number of elements that can be received from each processor
rdispls= displacement at which to place the incoming data from each processor
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoallv_dp2d(xval,sendcnts,sdispls,recvbuf,recvcnts,rdispls,comm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: sendcnts(:),sdispls(:),rdispls(:),recvcnts(:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: sc,sds,sdr,sz1

! *********************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_ALLTOALLV(xval,sendcnts,sdispls,MPI_DOUBLE_PRECISION,recvbuf,&
&   recvcnts,rdispls,MPI_DOUBLE_PRECISION,comm,ier)
 else if (comm == xpaw_mpi_comm_self) then
#endif
   sz1=size(xval,1)
   sds=0;if (size(sdispls)>0) sds=sdispls(1)/sz1
   sdr=0;if (size(rdispls)>0) sdr=rdispls(1)/sz1
   sc=size(xval,2);if (size(sendcnts)>0) sc=sendcnts(1)/sz1
   recvbuf(:,sdr+1:sdr+sc)=xval(:,sds+1:sds+sc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_alltoallv_dp2d
ABINIT/xpaw_mpi_alltoallv_int1d [ Functions ]
NAME
xpaw_mpi_alltoallv_int1d
FUNCTION
MPI_ALLTOALLV for 1D integer arrays
INPUTS
xval= buffer array
sendcnts= number of elements to send to each processor
sdispls= displacements from which to take the outgoing data
recvcnts= number of elements that can be received from each processor
rdispls= displacement at which to place the incoming data from each processor
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_alltoallv_int1d(xval,sendcnts,sdispls,recvbuf,recvcnts,rdispls,comm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendcnts(:),sdispls(:),recvcnts(:)
 integer, intent(in) :: comm, rdispls
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: sc,sds,sdr
#if defined HAVE_MPI
 integer :: rdispls_on(size(sendcnts))
#endif

! *********************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   rdispls_on = 0
   call MPI_ALLTOALLV(xval,sendcnts,sdispls,MPI_INTEGER,recvbuf,&
&   recvcnts,rdispls_on,MPI_INTEGER,comm,ier)
 else if (comm == xpaw_mpi_comm_self) then
#endif
   sdr=rdispls;sds=0;if (size(sdispls)>0) sds=sdispls(1)
   sc=size(xval);if (size(sendcnts)>0) sc=sendcnts(1)
   recvbuf(1:sc)=xval(sds+1:sds+sc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_alltoallv_int1d
ABINIT/xpaw_mpi_bcast_dp1d [ Functions ]
NAME
xpaw_mpi_bcast_dp1d
FUNCTION
MPI_BCAST for 1D double precision arrays
INPUTS
spaceComm= MPI communicator
master= master MPI node
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_bcast_dp1d(xval,master,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: spaceComm,master
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n=size(xval,dim=1)
   call MPI_BCAST(xval,n,MPI_DOUBLE_PRECISION,master,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_bcast_dp1d
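A hypothetical sketch, same assumptions as above: only the master rank holds meaningful data before the call, and every rank holds a copy afterwards. Note the wrapper takes the count from size(xval) itself.

subroutine demo_bcast_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: master=0
 integer :: ier
 real(dp) :: buf(10)
 buf=0.0_dp
 if (xpaw_mpi_comm_rank(comm)==master) buf=3.14_dp  ! only root fills the buffer
 call xpaw_mpi_bcast_dp1d(buf,master,comm,ier)      ! now every rank has it
end subroutine demo_bcast_dp1d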
ABINIT/xpaw_mpi_bcast_dp2d [ Functions ]
NAME
xpaw_mpi_bcast_dp2d
FUNCTION
MPI_BCAST for 2D double precision arrays
INPUTS
spaceComm= MPI communicator
master= master MPI node
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_bcast_dp2d(xval,master,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: spaceComm,master
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2)
   call MPI_BCAST(xval,n1*n2,MPI_DOUBLE_PRECISION,master,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_bcast_dp2d
ABINIT/xpaw_mpi_bcast_dp3d [ Functions ]
NAME
xpaw_mpi_bcast_dp3d
FUNCTION
MPI_BCAST for 3D double precision arrays
INPUTS
spaceComm= MPI communicator
master= master MPI node
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_bcast_dp3d(xval,master,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:,:)
 integer, intent(in) :: spaceComm,master
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,n3
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2) ; n3=size(xval,dim=3)
   call MPI_BCAST(xval,n1*n2*n3,MPI_DOUBLE_PRECISION,master,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_bcast_dp3d
ABINIT/xpaw_mpi_bcast_int [ Functions ]
NAME
xpaw_mpi_bcast_int
FUNCTION
MPI_BCAST for integers
INPUTS
spaceComm= MPI communicator
master= master MPI node
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer
SOURCE
subroutine xpaw_mpi_bcast_int(xval,master,spaceComm,ier)

!Arguments ------------------------------------
 integer, intent(inout) :: xval
 integer, intent(in) :: spaceComm,master
 integer, intent(out) :: ier

!Local variables-------------------------------
#if defined HAVE_MPI
 integer :: arr_xval(1)
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   arr_xval(1)=xval
   call MPI_BCAST(arr_xval,1,MPI_INTEGER,master,spaceComm,ier)
   xval=arr_xval(1)
 end if
#endif
end subroutine xpaw_mpi_bcast_int
ABINIT/xpaw_mpi_bcast_int1d [ Functions ]
NAME
xpaw_mpi_bcast_int1d
FUNCTION
MPI_BCAST for 1D integer arrays
INPUTS
spaceComm= MPI communicator
master= master MPI node
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_bcast_int1d(xval,master,spaceComm,ier)

!Arguments ------------------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: spaceComm,master
 integer, intent(out) :: ier

!Local variables-------------------------------
 integer :: n

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n=size(xval)
   call MPI_BCAST(xval,n,MPI_INTEGER,master,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_bcast_int1d
ABINIT/xpaw_mpi_exch_dp1d [ Functions ]
NAME
xpaw_mpi_exch_dp1d
FUNCTION
MPI_SEND/MPI_RECV for 1D double precision arrays
INPUTS
mtag= message tag
n1= first dimension of the array
vsend= send buffer
sender= node sending the data
recever= node receiving the data
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
vrecv= receive buffer
SOURCE
subroutine xpaw_mpi_exch_dp1d(vsend,n1,sender,vrecv,recever,spaceComm,mtag,ier)

!Arguments----------------
 integer, intent(in) :: mtag,n1
 real(dp), intent(in) :: vsend(:)
 real(dp), intent(inout) :: vrecv(:)
 integer, intent(in) :: sender,recever,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
#if defined HAVE_MPI
 integer :: status(MPI_STATUS_SIZE)
 integer :: tag,me
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (sender==recever.or.spaceComm==xpaw_mpi_comm_null.or.(n1==0)) return
 call MPI_COMM_RANK(spaceComm,me,ier)
 tag = MOD(mtag,xpaw_mpi_get_tag_ub(spaceComm))
 if (recever==me) then
   call MPI_RECV(vrecv,n1,MPI_DOUBLE_PRECISION,sender,tag,spaceComm,status,ier)
 end if
 if (sender==me) then
   call MPI_SEND(vsend,n1,MPI_DOUBLE_PRECISION,recever,tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_exch_dp1d
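A hypothetical sketch, same assumptions as the earlier examples. Every rank in the communicator makes the same call; the wrapper compares its own rank against sender and recever and sends, receives, or returns immediately.

subroutine demo_exch_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: mtag=42
 integer :: ier
 real(dp) :: vsend(5),vrecv(5)
 vsend=real(xpaw_mpi_comm_rank(comm),dp)
 ! Move 5 values from rank 0 to rank 1 (sender=0, recever=1).
 call xpaw_mpi_exch_dp1d(vsend,5,0,vrecv,1,comm,mtag,ier)
end subroutine demo_exch_dp1d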
ABINIT/xpaw_mpi_exch_dp2d [ Functions ]
NAME
xpaw_mpi_exch_dp2d
FUNCTION
MPI_SEND/MPI_RECV for 2D double precision arrays
INPUTS
mtag= message tag
nt= vector length
vsend= sent buffer
sender= node sending the data
recever= node receiving the data
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
vrecv= received buffer
SOURCE
subroutine xpaw_mpi_exch_dp2d(vsend,nt,sender,vrecv,recever,spaceComm,mtag,ier)

!Arguments----------------
 integer, intent(in) :: mtag,nt
 real(dp), intent(in) :: vsend(:,:)
 real(dp), intent(inout) :: vrecv(:,:)
 integer, intent(in) :: sender,recever,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
#if defined HAVE_MPI
 integer :: status(MPI_STATUS_SIZE)
 integer :: tag,me
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (sender==recever.or.spaceComm==xpaw_mpi_comm_null.or.(nt==0)) return
 call MPI_COMM_RANK(spaceComm,me,ier)
 tag = MOD(mtag,xpaw_mpi_get_tag_ub(spaceComm))
 if (recever==me) then
   call MPI_RECV(vrecv,nt,MPI_DOUBLE_PRECISION,sender,tag,spaceComm,status,ier)
 end if
 if (sender==me) then
   call MPI_SEND(vsend,nt,MPI_DOUBLE_PRECISION,recever,tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_exch_dp2d
ABINIT/xpaw_mpi_exch_dp3d [ Functions ]
NAME
xpaw_mpi_exch_dp3d
FUNCTION
MPI_SEND/MPI_RECV for 3D double precision arrays
INPUTS
mtag= message tag
nt= vector length
vsend= sent buffer
sender= node sending the data
recever= node receiving the data
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
vrecv= received buffer
SOURCE
subroutine xpaw_mpi_exch_dp3d(vsend,nt,sender,vrecv,recever,spaceComm,mtag,ier)

!Arguments----------------
 integer, intent(in) :: mtag,nt
 real(dp), intent(in) :: vsend(:,:,:)
 real(dp), intent(inout) :: vrecv(:,:,:)
 integer, intent(in) :: sender,recever,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
#if defined HAVE_MPI
 integer :: status(MPI_STATUS_SIZE)
 integer :: tag,me
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (sender==recever.or.spaceComm==xpaw_mpi_comm_null.or.(nt==0)) return
 call MPI_COMM_RANK(spaceComm,me,ier)
 tag = MOD(mtag,xpaw_mpi_get_tag_ub(spaceComm))
 if (recever==me) then
   call MPI_RECV(vrecv,nt,MPI_DOUBLE_PRECISION,sender,tag,spaceComm,status,ier)
 end if
 if (sender==me) then
   call MPI_SEND(vsend,nt,MPI_DOUBLE_PRECISION,recever,tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_exch_dp3d
ABINIT/xpaw_mpi_exch_int1d [ Functions ]
NAME
xpaw_mpi_exch_int1d
FUNCTION
MPI_SEND/MPI_RECV for 1D integer arrays
INPUTS
mtag= message tag
n1= vector length
vsend= sent buffer
sender= node sending the data
recever= node receiving the data
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
vrecv= received buffer
SOURCE
subroutine xpaw_mpi_exch_int1d(vsend,n1,sender,vrecv,recever,spaceComm,mtag,ier)

!Arguments----------------
 integer, intent(in) :: mtag,n1
 integer, intent(in) :: vsend(:)
 integer, intent(inout) :: vrecv(:)
 integer, intent(in) :: sender,recever,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
#if defined HAVE_MPI
 integer :: status(MPI_STATUS_SIZE)
 integer :: tag,me
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (sender==recever.or.spaceComm==xpaw_mpi_comm_null.or.(n1==0)) return
 call MPI_COMM_RANK(spaceComm,me,ier)
 tag = MOD(mtag,xpaw_mpi_get_tag_ub(spaceComm))
 if (recever==me) then
   call MPI_RECV(vrecv,n1,MPI_INTEGER,sender,tag,spaceComm,status,ier)
 end if
 if (sender==me) then
   call MPI_SEND(vsend,n1,MPI_INTEGER,recever,tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_exch_int1d
ABINIT/xpaw_mpi_gather_dp1d [ Functions ]
NAME
xpaw_mpi_gather_dp1d
FUNCTION
MPI_GATHER for 1D double precision arrays
INPUTS
xval= buffer array
sendcount= number of sent elements
recvcount= number of received elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gather_dp1d(xval,sendcount,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: sendcount,recvcount
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: root,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gather(xval,sendcount,MPI_DOUBLE_PRECISION,recvbuf,recvcount,MPI_DOUBLE_PRECISION,&
&   root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_gather_dp1d
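A hypothetical sketch, same assumptions as above: unlike the allgather variants, only root receives the concatenated result.

subroutine demo_gather_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: root=0
 integer :: nproc,ier
 real(dp) :: mine(2)
 real(dp), allocatable :: recvbuf(:)
 nproc=xpaw_mpi_comm_size(comm)
 mine=1.0_dp
 allocate(recvbuf(2*nproc))  ! only meaningful on root; allocated everywhere here
 call xpaw_mpi_gather_dp1d(mine,2,recvbuf,2,root,comm,ier)
end subroutine demo_gather_dp1d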
ABINIT/xpaw_mpi_gather_dp2d [ Functions ]
NAME
xpaw_mpi_gather_dp2d
FUNCTION
MPI_GATHER for 2D double precision arrays
INPUTS
xval= buffer array
sendcount= number of sent elements
recvcount= number of received elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gather_dp2d(xval,sendcount,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: sendcount,recvcount
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: root,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gather(xval,sendcount,MPI_DOUBLE_PRECISION,recvbuf,recvcount,MPI_DOUBLE_PRECISION,&
&   root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_gather_dp2d
ABINIT/xpaw_mpi_gather_int1d [ Functions ]
NAME
xpaw_mpi_gather_int1d
FUNCTION
MPI_GATHER for 1D integer arrays
INPUTS
xval= buffer array
sendcount= number of sent elements
recvcount= number of received elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gather_int1d(xval,sendcount,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: sendcount,recvcount
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: root,spaceComm
 integer, intent(out) :: ier

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gather(xval,sendcount,MPI_INTEGER,recvbuf,recvcount,MPI_INTEGER,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
   recvbuf=xval
 end if
#else
 recvbuf=xval
#endif
end subroutine xpaw_mpi_gather_int1d
ABINIT/xpaw_mpi_gatherv_dp1d [ Functions ]
NAME
xpaw_mpi_gatherv_dp1d
FUNCTION
MPI_GATHERV for 1D double precision arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gatherv_dp1d(xval,nelem,recvbuf,recvcounts,displs,root,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,root,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
 integer :: cc,dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&   MPI_DOUBLE_PRECISION,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_gatherv_dp1d
ABINIT/xpaw_mpi_gatherv_dp2d [ Functions ]
NAME
xpaw_mpi_gatherv_dp2d
FUNCTION
MPI_GATHERV for 2D double precision arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gatherv_dp2d(xval,nelem,recvbuf,recvcounts,displs,root,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,root,spaceComm
 integer, intent(out) :: ier

!Local variables--------------
 integer :: cc,dd,sz1

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
&   MPI_DOUBLE_PRECISION,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   sz1=size(xval,1)
   dd=0;if (size(displs)>0) dd=displs(1)/sz1
   cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
   recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_gatherv_dp2d
ABINIT/xpaw_mpi_gatherv_int1d [ Functions ]
NAME
xpaw_mpi_gatherv_int1d
FUNCTION
MPI_GATHERV for 1D integer arrays
INPUTS
xval= buffer array
recvcounts= number of received elements
displs= relative offsets for incoming data
nelem= number of elements
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_gatherv_int1d(xval,nelem,recvbuf,recvcounts,displs,root,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: recvcounts(:),displs(:)
 integer, intent(in) :: nelem,root,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_gatherV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
&   MPI_INTEGER,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
   recvbuf(dd+1:dd+cc)=xval(1:cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_gatherv_int1d
ABINIT/xpaw_mpi_get_tag_ub [ Functions ]
NAME
xpaw_mpi_get_tag_ub
FUNCTION
Get MPI_TAG_UB attribute
INPUTS
comm= MPI communicator
OUTPUT
xpaw_mpi_get_tag_ub= value of the MPI_TAG_UB attribute attached to comm
SOURCE
function xpaw_mpi_get_tag_ub(comm)

!Arguments-------------------------
 integer, intent(in) :: comm
 integer :: xpaw_mpi_get_tag_ub

!Local variables-------------------
#if defined HAVE_MPI
 integer :: attribute_val,ier
 logical :: lflag
#endif

! *************************************************************************

#if defined HAVE_MPI
!Deprecated in MPI2 but not all MPI2 implementations provide MPI_Comm_get_attr !
 call MPI_ATTR_GET(comm,MPI_TAG_UB,attribute_val,lflag,ier)
!call MPI_Comm_get_attr(comm,MPI_TAG_UB,attribute_val,lflag,ier)

 if (lflag) xpaw_mpi_get_tag_ub = attribute_val

#else
 xpaw_mpi_get_tag_ub=32767
#endif

end function xpaw_mpi_get_tag_ub
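The point-to-point wrappers in this module all funnel user tags through this function with MOD(tag, xpaw_mpi_get_tag_ub(spaceComm)), since MPI only guarantees tag values up to the communicator's MPI_TAG_UB attribute. A sketch of the same pattern in user code:

subroutine demo_safe_tag(comm)
 use m_libpaw_mpi
 implicit none
 integer, intent(in) :: comm
 integer :: big_tag,safe_tag
 big_tag=123456789                                 ! may exceed the MPI tag limit
 safe_tag=MOD(big_tag,xpaw_mpi_get_tag_ub(comm))   ! always a legal tag value
end subroutine demo_safe_tag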
ABINIT/xpaw_mpi_irecv_dp1d [ Functions ]
NAME
xpaw_mpi_irecv_dp1d
FUNCTION
MPI_IRECV for 1D double precision arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_IRECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_irecv_dp1d(xval,source,tag,spaceComm,request,ierr)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,my_tag,n1
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_IRECV(xval,n1,MPI_DOUBLE_PRECISION,source,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_irecv_dp1d
ABINIT/xpaw_mpi_irecv_dp2d [ Functions ]
NAME
xpaw_mpi_irecv_dp2d
FUNCTION
MPI_IRECV for 2D double precision arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_IRECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_irecv_dp2d(xval,source,tag,spaceComm,request,ierr)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,my_tag,n1,n2
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1);n2=size(xval,dim=2)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_IRECV(xval,n1*n2,MPI_DOUBLE_PRECISION,source,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_irecv_dp2d
ABINIT/xpaw_mpi_irecv_int1d [ Functions ]
NAME
xpaw_mpi_irecv_int1d
FUNCTION
MPI_IRECV for 1D integer arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_IRECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_irecv_int1d(xval,source,tag,spaceComm,request,ierr)

!Arguments-------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,n1,my_tag
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_IRECV(xval,n1,MPI_INTEGER,source,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_irecv_int1d
ABINIT/xpaw_mpi_isend_dp1d [ Functions ]
NAME
xpaw_mpi_isend_dp1d
FUNCTION
MPI_ISEND for 1D double precision arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_isend_dp1d(xval,dest,tag,spaceComm,request,ierr)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,my_tag,n1
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_ISEND(xval,n1,MPI_DOUBLE_PRECISION,dest,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_isend_dp1d
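A sketch of a nonblocking exchange between ranks 0 and 1, hypothetical usage rather than library code. It assumes that xpaw_mpi_wait(request,ierr), listed in the table of contents but not documented in this excerpt, completes a pending request in the conventional way.

subroutine demo_isend_irecv(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: tag=7
 integer :: me,req,ierr
 real(dp) :: buf(4)
 me=xpaw_mpi_comm_rank(comm)
 if (me==0) then
   buf=1.0_dp
   call xpaw_mpi_isend_dp1d(buf,1,tag,comm,req,ierr)  ! post send to rank 1
   call xpaw_mpi_wait(req,ierr)                       ! buf reusable only after this
 else if (me==1) then
   call xpaw_mpi_irecv_dp1d(buf,0,tag,comm,req,ierr)  ! post receive from rank 0
   call xpaw_mpi_wait(req,ierr)                       ! buf valid only after this
 end if
end subroutine demo_isend_irecv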
ABINIT/xpaw_mpi_isend_dp2d [ Functions ]
NAME
xpaw_mpi_isend_dp2d
FUNCTION
MPI_ISEND for 2D double precision arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_isend_dp2d(xval,dest,tag,spaceComm,request,ierr)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,my_tag,n1,n2
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_ISEND(xval,n1*n2,MPI_DOUBLE_PRECISION,dest,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_isend_dp2d
ABINIT/xpaw_mpi_isend_int1d [ Functions ]
NAME
xpaw_mpi_isend_int1d
FUNCTION
MPI_ISEND for 1D integer arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ierr= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_isend_int1d(xval,dest,tag,spaceComm,request,ierr)

!Arguments-------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: request,ierr

!Local variables-------------------
#if defined HAVE_MPI
 integer :: ier,my_tag,n1
#endif

! *************************************************************************
 ierr=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_ISEND(xval,n1,MPI_INTEGER,dest,my_tag,spaceComm,request,ier)
   ierr=ier
 end if
#endif
end subroutine xpaw_mpi_isend_int1d
ABINIT/xpaw_mpi_recv_dp1d [ Functions ]
NAME
xpaw_mpi_recv_dp1d
FUNCTION
MPI_RECV for 1D double precision arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_RECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_recv_dp1d(xval,source,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_RECV(xval,n1,MPI_DOUBLE_PRECISION,source,my_tag,spaceComm,MPI_STATUS_IGNORE,ier)
 end if
#endif
end subroutine xpaw_mpi_recv_dp1d
ABINIT/xpaw_mpi_recv_dp2d [ Functions ]
NAME
xpaw_mpi_recv_dp2d
FUNCTION
MPI_RECV for 2D double precision arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_RECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_recv_dp2d(xval,source,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_RECV(xval,n1*n2,MPI_DOUBLE_PRECISION,source,my_tag,spaceComm,MPI_STATUS_IGNORE,ier)
 end if
#endif
end subroutine xpaw_mpi_recv_dp2d
ABINIT/xpaw_mpi_recv_dp3d [ Functions ]
NAME
xpaw_mpi_recv_dp3d
FUNCTION
MPI_RECV for 3D double precision arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_RECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_recv_dp3d(xval,source,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:,:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,n3,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2) ; n3=size(xval,dim=3)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_RECV(xval,n1*n2*n3,MPI_DOUBLE_PRECISION,source,my_tag,spaceComm,MPI_STATUS_IGNORE,ier)
 end if
#endif
end subroutine xpaw_mpi_recv_dp3d
ABINIT/xpaw_mpi_recv_int1d [ Functions ]
NAME
xpaw_mpi_recv_int1d
FUNCTION
MPI_RECV for 1D integer arrays
INPUTS
source= rank of source process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
NOTES
status of MPI_RECV is explicitly ignored
SOURCE
subroutine xpaw_mpi_recv_int1d(xval,source,tag,spaceComm,ier)

!Arguments-------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: source,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: my_tag,n1
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_RECV(xval,n1,MPI_INTEGER,source,my_tag,spaceComm,MPI_STATUS_IGNORE,ier)
 end if
#endif
end subroutine xpaw_mpi_recv_int1d
ABINIT/xpaw_mpi_scatterv_dp1d [ Functions ]
NAME
xpaw_mpi_scatterv_dp1d
FUNCTION
MPI_SCATTERV for 1D double precision arrays
INPUTS
xval= buffer array
recvcount= number of received elements
displs= relative offsets for incoming data (array)
sendcounts= number of sent elements (array)
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_scatterv_dp1d(xval,sendcounts,displs,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:)
 real(dp), intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendcounts(:),displs(:)
 integer, intent(in) :: recvcount,root,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&   MPI_DOUBLE_PRECISION,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   recvbuf(1:recvcount)=xval(dd+1:dd+recvcount)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_scatterv_dp1d
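A hypothetical sketch, the inverse of the gatherv pattern shown earlier, under the same assumptions: root holds the full array and rank r receives its own block of r+1 values.

subroutine demo_scatterv_dp1d(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: root=0
 integer :: me,nproc,ier,ii
 integer, allocatable :: counts(:),displs(:)
 real(dp), allocatable :: full(:),mine(:)
 me=xpaw_mpi_comm_rank(comm) ; nproc=xpaw_mpi_comm_size(comm)
 allocate(counts(nproc),displs(nproc))
 counts=(/(ii,ii=1,nproc)/)                    ! rank r receives r+1 values
 displs(1)=0
 do ii=2,nproc
   displs(ii)=displs(ii-1)+counts(ii-1)
 end do
 allocate(full(sum(counts))) ; full=0.0_dp     ! only root's content matters
 allocate(mine(me+1))
 call xpaw_mpi_scatterv_dp1d(full,counts,displs,mine,me+1,root,comm,ier)
end subroutine demo_scatterv_dp1d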
ABINIT/xpaw_mpi_scatterv_dp2d [ Functions ]
NAME
xpaw_mpi_scatterv_dp2d
FUNCTION
MPI_SCATTERV for 2D double precision arrays
INPUTS
xval= buffer array
recvcount= number of received elements
displs= relative offsets for incoming data (array)
sendcounts= number of sent elements (array)
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_scatterv_dp2d(xval,sendcounts,displs,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(in) :: xval(:,:)
 real(dp), intent(inout) :: recvbuf(:,:)
 integer, intent(in) :: sendcounts(:),displs(:)
 integer, intent(in) :: recvcount,root,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: cc,dd,sz1

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_DOUBLE_PRECISION,recvbuf,recvcount,&
&   MPI_DOUBLE_PRECISION,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   sz1=size(recvbuf,1);cc=recvcount/sz1
   dd=0;if (size(displs)>0) dd=displs(1)/sz1
   recvbuf(:,1:cc)=xval(:,dd+1:dd+cc)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_scatterv_dp2d
ABINIT/xpaw_mpi_scatterv_int1d [ Functions ]
NAME
xpaw_mpi_scatterv_int1d
FUNCTION
MPI_SCATTERV for 1D integer arrays
INPUTS
xval= buffer array
recvcount= number of received elements
displs= relative offsets for incoming data (array)
sendcounts= number of sent elements (array)
root= rank of receiving process
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
recvbuf= received buffer
SOURCE
subroutine xpaw_mpi_scatterv_int1d(xval,sendcounts,displs,recvbuf,recvcount,root,spaceComm,ier)

!Arguments-------------------------
 integer, intent(in) :: xval(:)
 integer, intent(inout) :: recvbuf(:)
 integer, intent(in) :: sendcounts(:),displs(:)
 integer, intent(in) :: recvcount,root,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
 integer :: dd

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   call MPI_SCATTERV(xval,sendcounts,displs,MPI_INTEGER,recvbuf,recvcount,&
&   MPI_INTEGER,root,spaceComm,ier)
 else if (spaceComm == xpaw_mpi_comm_self) then
#endif
   dd=0;if (size(displs)>0) dd=displs(1)
   recvbuf(1:recvcount)=xval(dd+1:dd+recvcount)
#if defined HAVE_MPI
 end if
#endif
end subroutine xpaw_mpi_scatterv_int1d
ABINIT/xpaw_mpi_send_dp1d [ Functions ]
NAME
xpaw_mpi_send_dp1d
FUNCTION
MPI_SEND for 1D double precision arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_send_dp1d(xval,dest,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_SEND(xval,n1,MPI_DOUBLE_PRECISION,dest,my_tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_send_dp1d
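A hypothetical sketch pairing this blocking send with the matching xpaw_mpi_recv_dp1d documented above, under the same assumptions as the earlier examples.

subroutine demo_send_recv(comm)
 use m_libpaw_mpi
 implicit none
 integer, parameter :: dp=kind(1.0d0)
 integer, intent(in) :: comm
 integer, parameter :: tag=3
 integer :: me,ier
 real(dp) :: buf(8)
 me=xpaw_mpi_comm_rank(comm)
 if (me==0) then
   buf=2.0_dp
   call xpaw_mpi_send_dp1d(buf,1,tag,comm,ier)   ! blocking send to rank 1
 else if (me==1) then
   call xpaw_mpi_recv_dp1d(buf,0,tag,comm,ier)   ! blocking receive from rank 0
 end if
end subroutine demo_send_recv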
ABINIT/xpaw_mpi_send_dp2d [ Functions ]
NAME
xpaw_mpi_send_dp2d
FUNCTION
MPI_SEND for 2D double precision arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_send_dp2d(xval,dest,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_SEND(xval,n1*n2,MPI_DOUBLE_PRECISION,dest,my_tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_send_dp2d
ABINIT/xpaw_mpi_send_dp3d [ Functions ]
NAME
xpaw_mpi_send_dp3d
FUNCTION
MPI_SEND for 3D double precision arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_send_dp3d(xval,dest,tag,spaceComm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:,:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,n3,my_tag
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1) ; n2=size(xval,dim=2) ; n3=size(xval,dim=3)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_SEND(xval,n1*n2*n3,MPI_DOUBLE_PRECISION,dest,my_tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_send_dp3d
ABINIT/xpaw_mpi_send_int1d [ Functions ]
NAME
xpaw_mpi_send_int1d
FUNCTION
MPI_SEND for 1D integer arrays
INPUTS
dest= rank of destination process
tag= integer message tag
spaceComm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_send_int1d(xval,dest,tag,spaceComm,ier)

!Arguments-------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: dest,tag,spaceComm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: my_tag, n1
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (spaceComm /= xpaw_mpi_comm_self .and. spaceComm /= xpaw_mpi_comm_null) then
   n1=size(xval,dim=1)
   my_tag = MOD(tag,xpaw_mpi_get_tag_ub(spaceComm))
   call MPI_SEND(xval,n1,MPI_INTEGER,dest,my_tag,spaceComm,ier)
 end if
#endif
end subroutine xpaw_mpi_send_int1d
ABINIT/xpaw_mpi_sum_dp1d [ Functions ]
NAME
xpaw_mpi_sum_dp1d
FUNCTION
MPI_ALLREDUCE(SUM) for 1D double precision arrays
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_sum_dp1d(xval,comm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,nproc
#if !defined HAVE_MPI2_INPLACE
 real(dp) :: xsum(size(xval,dim=1))
#endif
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nproc,ier)
   if (nproc /= 1) then
     n1=size(xval,dim=1)
#if defined HAVE_MPI2_INPLACE
     call MPI_ALLREDUCE(MPI_IN_PLACE,xval,n1,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
#else
     call MPI_ALLREDUCE(xval,xsum,n1,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
     xval(:)=xsum(:)
#endif
   end if
 end if
#endif
end subroutine xpaw_mpi_sum_dp1d
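A hypothetical sketch of the typical reduction pattern: each process accumulates a local partial result, then one call replaces it with the global sum on every process (in place when HAVE_MPI2_INPLACE is available, through a temporary otherwise).

   ! Hypothetical sketch: local partial sums -> identical global sums everywhere.
   subroutine demo_sum_dp1d(spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: spaceComm
     integer :: ier
     double precision :: acc(4)
     acc(:) = dble(xpaw_mpi_comm_rank(spaceComm))   ! stand-in for a local contribution
     call xpaw_mpi_sum_dp1d(acc,spaceComm,ier)      ! acc now holds the sum over all ranks
   end subroutine demo_sum_dp1d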
ABINIT/xpaw_mpi_sum_dp2d [ Functions ]
NAME
xpaw_mpi_sum_dp2d
FUNCTION
MPI_ALLREDUCE(SUM) for 2D double precision arrays
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_sum_dp2d(xval,comm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,nproc
#if !defined HAVE_MPI2_INPLACE
 real(dp) :: xsum(size(xval,dim=1),size(xval,dim=2))
#endif
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nproc,ier)
   if (nproc /= 1) then
     n1=size(xval,dim=1) ; n2=size(xval,dim=2)
#if defined HAVE_MPI2_INPLACE
     call MPI_ALLREDUCE(MPI_IN_PLACE,xval,n1*n2,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
#else
     call MPI_ALLREDUCE(xval,xsum,n1*n2,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
     xval(:,:)=xsum(:,:)
#endif
   end if
 end if
#endif
end subroutine xpaw_mpi_sum_dp2d
ABINIT/xpaw_mpi_sum_dp3d [ Functions ]
NAME
xpaw_mpi_sum_dp3d
FUNCTION
MPI_ALLREDUCE(SUM) for 3D double precision arrays
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_sum_dp3d(xval,comm,ier)

!Arguments-------------------------
 real(dp), intent(inout) :: xval(:,:,:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,n2,n3,nproc
#if !defined HAVE_MPI2_INPLACE
 real(dp) :: xsum(size(xval,dim=1),size(xval,dim=2),size(xval,dim=3))
#endif
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nproc,ier)
   if (nproc /= 1) then
     n1=size(xval,dim=1) ; n2=size(xval,dim=2) ; n3=size(xval,dim=3)
#if defined HAVE_MPI2_INPLACE
     call MPI_ALLREDUCE(MPI_IN_PLACE,xval,n1*n2*n3,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
#else
     call MPI_ALLREDUCE(xval,xsum,n1*n2*n3,MPI_DOUBLE_PRECISION,MPI_SUM,comm,ier)
     xval(:,:,:)=xsum(:,:,:)
#endif
   end if
 end if
#endif
end subroutine xpaw_mpi_sum_dp3d
ABINIT/xpaw_mpi_sum_int [ Functions ]
NAME
xpaw_mpi_sum_int
FUNCTION
MPI_ALLREDUCE(SUM) for integers
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer
SOURCE
subroutine xpaw_mpi_sum_int(xval,comm,ier)

!Arguments-------------------------
 integer, intent(inout) :: xval
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: nproc
 integer :: arr_xval(1),arr_xsum(1)
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nproc,ier)
   if (nproc /= 1) then
     arr_xval(1)=xval
     call MPI_ALLREDUCE(arr_xval,arr_xsum,1,MPI_INTEGER,MPI_SUM,comm,ier)
     xval=arr_xsum(1)
   end if
 end if
#endif
end subroutine xpaw_mpi_sum_int
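A hypothetical sketch of the common use case, a global counter: the scalar is summed in place over the communicator.

   ! Hypothetical sketch: count items over all processes.
   subroutine demo_sum_int(nlocal,ntotal,spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: nlocal,spaceComm
     integer, intent(out) :: ntotal
     integer :: ier
     ntotal = nlocal
     call xpaw_mpi_sum_int(ntotal,spaceComm,ier)   ! ntotal = sum of nlocal over all ranks
   end subroutine demo_sum_int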
ABINIT/xpaw_mpi_sum_int1d [ Functions ]
NAME
xpaw_mpi_sum_int1d
FUNCTION
MPI_ALLREDUCE(SUM) for 1D integer arrays
INPUTS
comm= MPI communicator
OUTPUT
ier= exit status, a non-zero value meaning there is an error
SIDE EFFECTS
xval= buffer array
SOURCE
subroutine xpaw_mpi_sum_int1d(xval,comm,ier)

!Arguments-------------------------
 integer, intent(inout) :: xval(:)
 integer, intent(in) :: comm
 integer, intent(out) :: ier

!Local variables-------------------
#if defined HAVE_MPI
 integer :: n1,nproc
#if !defined HAVE_MPI2_INPLACE
 integer :: xsum(size(xval,dim=1))
#endif
#endif

! *************************************************************************
 ier=0
#if defined HAVE_MPI
 if (comm /= xpaw_mpi_comm_self .and. comm /= xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nproc,ier)
   if (nproc /= 1) then
     n1=size(xval,dim=1)
#if defined HAVE_MPI2_INPLACE
     call MPI_ALLREDUCE(MPI_IN_PLACE,xval,n1,MPI_INTEGER,MPI_SUM,comm,ier)
#else
     call MPI_ALLREDUCE(xval,xsum,n1,MPI_INTEGER,MPI_SUM,comm,ier)
     xval(:)=xsum(:)
#endif
   end if
 end if
#endif
end subroutine xpaw_mpi_sum_int1d
m_libpaw_mpi/xpaw_mpi_abort [ Functions ]
NAME
xpaw_mpi_abort
FUNCTION
Wrapper for MPI_ABORT
INPUTS
[comm]= communicator of tasks to abort (optional; default: xpaw_mpi_world)
[mpierr]= error code to return to the invoking environment (optional; default: MPI_ERR_UNKNOWN)
[msg]= user message (optional)
[exit_status]= shell return code (optional; default: 1)
SOURCE
subroutine xpaw_mpi_abort(comm,mpierr,msg,exit_status)

!Arguments-------------------------
 integer,optional,intent(in) :: comm,mpierr,exit_status
 character(len=*),optional,intent(in) :: msg

!Local variables-------------------
 integer :: ilen,ierr,ierr2,my_comm,my_errorcode,my_exit_status
 logical :: testopen
#ifdef HAVE_MPI
 character(len=MPI_MAX_ERROR_STRING) :: mpi_msg_error
#endif

! *************************************************************************

 ierr=0
 my_comm = xpaw_mpi_world; if (PRESENT(comm)) my_comm = comm
 my_exit_status = 1; if (PRESENT(exit_status)) my_exit_status=exit_status

 if (PRESENT(msg)) then
   write(std_out,'(2a)')"User message: ",TRIM(msg)
 end if

! Close std_out and ab_out
 inquire(std_out,opened=testopen)
 if (testopen) close(std_out)
 inquire(ab_out,opened=testopen)
 if (testopen) close(ab_out)

#ifdef HAVE_MPI
 my_errorcode=MPI_ERR_UNKNOWN; if (PRESENT(mpierr)) my_errorcode=mpierr
 call MPI_ERROR_STRING(my_errorcode, mpi_msg_error, ilen, ierr2)
 call MPI_ABORT(my_comm,my_errorcode,ierr)
#endif

! Use the defaulted local copy: referencing the optional dummy directly
! would be invalid when the caller omits it
#if defined FC_NAG
 call exit(my_exit_status)
#elif defined HAVE_FC_EXIT
 call exit(my_exit_status)
#else
 if (my_exit_status== 0) stop "0"
 if (my_exit_status== 1) stop "1"
 if (my_exit_status==-1) stop "-1"
#endif
 stop "1"

end subroutine xpaw_mpi_abort
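A hypothetical error-handling sketch; all arguments are optional, so a bare call with just a message suffices (the communicator then defaults to xpaw_mpi_world and the shell exit status to 1).

   ! Hypothetical sketch: abort every task after a fatal local error.
   subroutine demo_abort(io_status)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: io_status
     if (io_status /= 0) then
       call xpaw_mpi_abort(msg="fatal: cannot open PAW dataset file")
     end if
   end subroutine demo_abort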
m_libpaw_mpi/xpaw_mpi_barrier [ Functions ]
NAME
xpaw_mpi_barrier
FUNCTION
Wrapper for MPI_BARRIER
INPUTS
comm=MPI communicator
SOURCE
subroutine xpaw_mpi_barrier(comm)

!Arguments-------------------------
 integer,intent(in) :: comm

!Local variables-------------------
 integer :: ier
#ifdef HAVE_MPI
 integer :: nprocs
#endif

! *************************************************************************

 ier = 0
#ifdef HAVE_MPI
 if (comm/=xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,nprocs,ier)
   if (nprocs>1) then
     call MPI_BARRIER(comm,ier)
   end if
 end if
#endif

end subroutine xpaw_mpi_barrier
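A hypothetical synchronization sketch: rank 0 produces a file that the other ranks must not read before it is complete. As the source shows, the wrapper is a no-op on null and single-process communicators.

   ! Hypothetical sketch: order file I/O across the communicator.
   subroutine demo_barrier(spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: spaceComm
     if (xpaw_mpi_comm_rank(spaceComm)==0) then
       ! ... rank 0 writes a shared scratch file here ...
     end if
     call xpaw_mpi_barrier(spaceComm)   ! nobody proceeds before the file exists
     ! ... every rank may now read the file ...
   end subroutine demo_barrier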
m_libpaw_mpi/xpaw_mpi_comm_rank [ Functions ]
NAME
xpaw_mpi_comm_rank
FUNCTION
Wrapper for MPI_COMM_RANK
INPUTS
comm=MPI communicator.
OUTPUT
xpaw_mpi_comm_rank=The rank of the node inside comm
SOURCE
function xpaw_mpi_comm_rank(comm)

!Arguments-------------------------
 integer,intent(in) :: comm
 integer :: xpaw_mpi_comm_rank

!Local variables-------------------
 integer :: mpierr

! *************************************************************************

 mpierr=0
#ifdef HAVE_MPI
 xpaw_mpi_comm_rank=-1  ! Return non-sense value if the proc does not belong to the comm
 if (comm/=xpaw_mpi_comm_null) then
   call MPI_COMM_RANK(comm,xpaw_mpi_comm_rank,mpierr)
 end if
#else
 xpaw_mpi_comm_rank=0
#endif

end function xpaw_mpi_comm_rank
m_libpaw_mpi/xpaw_mpi_comm_size [ Functions ]
NAME
xpaw_mpi_comm_size
FUNCTION
Wrapper for MPI_COMM_SIZE
INPUTS
comm=MPI communicator.
OUTPUT
xpaw_mpi_comm_size=The number of processors inside comm.
SOURCE
function xpaw_mpi_comm_size(comm)

!Arguments-------------------------
 integer,intent(in) :: comm
 integer :: xpaw_mpi_comm_size

!Local variables-------------------
!scalars
 integer :: mpierr

! *************************************************************************

 mpierr=0; xpaw_mpi_comm_size=1
#ifdef HAVE_MPI
 if (comm/=xpaw_mpi_comm_null) then
   call MPI_COMM_SIZE(comm,xpaw_mpi_comm_size,mpierr)
 end if
#endif

end function xpaw_mpi_comm_size
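Together with xpaw_mpi_comm_rank, this is the basis of the usual round-robin work distribution; a hypothetical sketch (demo routine and the "atomic sites" workload are illustrative only):

   ! Hypothetical sketch: round-robin distribution of atomic sites over processes.
   subroutine demo_distribute(natom,spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: natom,spaceComm
     integer :: me,nproc,iatom
     me    = xpaw_mpi_comm_rank(spaceComm)
     nproc = xpaw_mpi_comm_size(spaceComm)
     do iatom=1,natom
       if (mod(iatom-1,nproc)/=me) cycle   ! skip sites owned by other ranks
       ! ... work on site iatom ...
     end do
   end subroutine demo_distribute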
m_libpaw_mpi/xpaw_mpi_iprobe [ Functions ]
NAME
xpaw_mpi_iprobe
FUNCTION
Wrapper for MPI_IPROBE
INPUTS
source= rank of source process
tag= tag value
mpicomm= MPI communicator
OUTPUT
flag= true if a message with the specified source, tag, and communicator is available
mpierr= error status
SOURCE
subroutine xpaw_mpi_iprobe(source,tag,mpicomm,flag,mpierr)

!Arguments-------------------------
 integer,intent(in) :: mpicomm,source,tag
 integer,intent(out) :: mpierr
 logical,intent(out) :: flag

!Local variables-------------------
#ifdef HAVE_MPI
 integer :: ier,status(MPI_STATUS_SIZE)
#endif

! *************************************************************************

 mpierr = 0
#ifdef HAVE_MPI
 call MPI_IPROBE(source,tag,mpicomm,flag,status,ier)
 mpierr=ier
#endif

end subroutine xpaw_mpi_iprobe
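A hypothetical polling sketch pairing the probe with a matching receive (xpaw_mpi_recv_int1d from the table of contents; its signature is assumed to mirror the send wrappers). One caveat visible in the source above: the probe passes the tag through unchanged, whereas the send wrappers fold it with MOD against the tag upper bound, so tags should be kept below that bound for the two to match.

   ! Hypothetical sketch: receive only if a message is already pending.
   subroutine demo_iprobe(source,tag,spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: source,tag,spaceComm
     integer :: ier,msg(10)
     logical :: ready
     call xpaw_mpi_iprobe(source,tag,spaceComm,ready,ier)
     if (ready) then
       call xpaw_mpi_recv_int1d(msg,source,tag,spaceComm,ier)   ! assumed signature
     end if
   end subroutine demo_iprobe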
m_libpaw_mpi/xpaw_mpi_wait [ Functions ]
NAME
xpaw_mpi_wait
FUNCTION
Wrapper for MPI_WAIT
INPUTS
request= MPI request handle to wait for
OUTPUT
mpierr= error status
SOURCE
subroutine xpaw_mpi_wait(request,mpierr)

!Arguments-------------------------
 integer,intent(out) :: mpierr
 integer,intent(inout) :: request

!Local variables-------------------
#ifdef HAVE_MPI
 integer :: ier,status(MPI_STATUS_SIZE)
#endif

! *************************************************************************

 mpierr = 0
#ifdef HAVE_MPI
 call MPI_WAIT(request,status,ier)
 mpierr=ier
#endif

end subroutine xpaw_mpi_wait
m_libpaw_mpi/xpaw_mpi_waitall [ Functions ]
NAME
xpaw_mpi_waitall
FUNCTION
Wrapper for MPI_WAITALL
INPUTS
array_of_requests= array of request handles
OUTPUT
mpierr= error status
SOURCE
subroutine xpaw_mpi_waitall(array_of_requests,mpierr)

!Arguments-------------------------
 integer,intent(inout) :: array_of_requests(:)
 integer,intent(out) :: mpierr

!Local variables-------------------
#ifdef HAVE_MPI
 integer :: ier,status(MPI_STATUS_SIZE,size(array_of_requests))
#endif

! *************************************************************************

 mpierr = 0
#ifdef HAVE_MPI
 call MPI_WAITALL(size(array_of_requests),array_of_requests,status,ier)
 mpierr=ier
#endif

end subroutine xpaw_mpi_waitall
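A hypothetical sketch combining non-blocking sends with a single completion wait. xpaw_mpi_isend_dp1d is listed in the table of contents; its request argument is assumed to come last, and matching receives are assumed to be posted by the destination ranks.

   ! Hypothetical sketch: post one isend per peer, then wait for all of them.
   subroutine demo_waitall(spaceComm)
     use m_libpaw_mpi
     implicit none
     integer, intent(in) :: spaceComm
     integer :: ier,idest,ireq,nproc,me
     integer, allocatable :: requests(:)
     double precision :: payload(8)
     me    = xpaw_mpi_comm_rank(spaceComm)
     nproc = xpaw_mpi_comm_size(spaceComm)
     allocate(requests(nproc-1)) ; payload = dble(me)
     ireq = 0
     do idest=0,nproc-1
       if (idest==me) cycle
       ireq = ireq+1
       call xpaw_mpi_isend_dp1d(payload,idest,0,spaceComm,ier,requests(ireq))
     end do
     call xpaw_mpi_waitall(requests,ier)   ! completes every pending isend
     deallocate(requests)
   end subroutine demo_waitall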