ABINIT/xmpi_gatherv [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv

FUNCTION

  This module contains functions that call MPI routines,
  provided the code is compiled with the MPI CPP flags.
  xmpi_gatherv is the generic function.

COPYRIGHT

  Copyright (C) 2001-2022 ABINIT group (MT,GG)
  This file is distributed under the terms of the
  GNU General Public License, see ~ABINIT/COPYING
  or http://www.gnu.org/copyleft/gpl.txt .

SOURCE

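  As a minimal illustration of the generic interface, the sketch below calls the generic name,
  which resolves to the type- and rank-specific routines documented in this file. It assumes
  ABINIT's m_xmpi module, the dp kind from defs_basis and the xmpi_comm_self handle (names taken
  from the surrounding code base, not from this file), and that MPI has already been initialized.
  With a self communicator each process simply copies its own buffer, so the sketch runs on any
  number of ranks.

 subroutine sketch_generic_dispatch()
  use defs_basis, only : dp
  use m_xmpi
  implicit none
  integer :: ier
  integer :: counts(1),displs(1)
  integer :: ibuf(5),irecv(5)
  real(dp) :: dbuf(3,5),drecv(3,5)

  ibuf=1 ; dbuf=1.0_dp ; displs(1)=0

  counts(1)=5   ! counts and displs are expressed in scalar elements
  call xmpi_gatherv(ibuf,5,irecv,counts,displs,0,xmpi_comm_self,ier)   ! -> xmpi_gatherv_int

  counts(1)=15  ! 3*5 elements of the two-dimensional array
  call xmpi_gatherv(dbuf,15,drecv,counts,displs,0,xmpi_comm_self,ier)  ! -> xmpi_gatherv_dp2d
 end subroutine sketch_generic_dispatch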

ABINIT/xmpi_gatherv_dp [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: one-dimensional double precision arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

369 subroutine xmpi_gatherv_dp(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
370 
371 !Arguments-------------------------
372  real(dp), DEV_CONTARRD intent(in) :: xval(:)
373  real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:)
374  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
375  integer,intent(in) :: nelem,root,comm
376  integer,intent(out) :: ier
377 
378 !Local variables--------------
379  integer :: cc,dd
380 
381 ! *************************************************************************
382 
383  ier=0
384 #if defined HAVE_MPI
385  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
386    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
387 &   MPI_DOUBLE_PRECISION,root,comm,ier)
388  else if (comm == MPI_COMM_SELF) then
389 #endif
390    dd=0;if (size(displs)>0) dd=displs(1)
391    cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
392    recvbuf(dd+1:dd+cc)=xval(1:cc)
393 #if defined HAVE_MPI
394  end if
395 #endif
396 
397 end subroutine xmpi_gatherv_dp
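
  A possible usage sketch for the one-dimensional case is given below: each rank contributes a
  slice of different length, and recvcounts/displs are built from the per-rank lengths. The helper
  name gather_dp_example is hypothetical; the sketch assumes m_xmpi and the dp kind from
  defs_basis, and gathers the counts with xmpi_allgather using the same call pattern as in
  xmpi_gatherv_int1_dp1 further down.

 subroutine gather_dp_example(comm)
  use defs_basis, only : dp
  use m_xmpi
  implicit none
  integer,intent(in) :: comm
  integer :: me,nproc,nelem,ii,ier,sendsz(1)
  integer,allocatable :: recvcounts(:),displs(:)
  real(dp),allocatable :: xval(:),recvbuf(:)

  me=xmpi_comm_rank(comm) ; nproc=xmpi_comm_size(comm)
  nelem=me+1                          ! each rank sends a different number of elements
  allocate(xval(nelem)) ; xval(:)=real(me,dp)

! Every rank needs the per-rank counts to build recvcounts and displs
  allocate(recvcounts(nproc),displs(nproc))
  sendsz(1)=nelem
  call xmpi_allgather(sendsz,1,recvcounts,comm,ier)
  displs(1)=0
  do ii=2,nproc
    displs(ii)=displs(ii-1)+recvcounts(ii-1)
  end do
  allocate(recvbuf(sum(recvcounts)))  ! allocated everywhere for simplicity; filled on root

  call xmpi_gatherv(xval,nelem,recvbuf,recvcounts,displs,0,comm,ier)
 end subroutine gather_dp_example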

ABINIT/xmpi_gatherv_dp2d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp2d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: double precision two-dimensional arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

424 subroutine xmpi_gatherv_dp2d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
425 
426 !Arguments-------------------------
427  real(dp), DEV_CONTARRD intent(in) :: xval(:,:)
428  real(dp), DEV_CONTARRD intent(inout) :: recvbuf(:,:)
429  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
430  integer,intent(in) :: nelem,root,comm
431  integer,intent(out) :: ier
432 
433 !Local variables--------------
434  integer :: cc,dd,sz1
435 
436 ! *************************************************************************
437 
438  ier=0
439 #if defined HAVE_MPI
440  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
441    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
442 &   MPI_DOUBLE_PRECISION,root,comm,ier)
443  else if (comm == MPI_COMM_SELF) then
444 #endif
445    sz1=size(xval,1)
446    dd=0;if (size(displs)>0) dd=displs(1)/sz1
447    cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
448    recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
449 #if defined HAVE_MPI
450  end if
451 #endif
452 
453 end subroutine xmpi_gatherv_dp2d
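
  For the multi-dimensional variants, recvcounts and displs are still expressed in scalar
  elements, i.e. in multiples of the (fixed) leading dimensions, as the MPI_COMM_SELF fallback
  above suggests by dividing them by size(xval,1). Below is a hedged sketch with the hypothetical
  helper name gather_columns, assuming m_xmpi and the dp kind from defs_basis; recvbuf must have
  the same leading dimension as xval and, on root, enough columns for all contributions.

 subroutine gather_columns(xval,recvbuf,root,comm,ier)
  use defs_basis, only : dp
  use m_xmpi
  implicit none
  real(dp),intent(in) :: xval(:,:)        ! local columns; leading dimension equal on all ranks
  real(dp),intent(inout) :: recvbuf(:,:)  ! on root: holds the gathered columns
  integer,intent(in) :: root,comm
  integer,intent(out) :: ier
  integer :: ii,nproc,nelem,sendsz(1)
  integer,allocatable :: recvcounts(:),displs(:)

  nproc=xmpi_comm_size(comm)
  nelem=size(xval)                        ! size(xval,1)*size(xval,2) scalar elements
  allocate(recvcounts(nproc),displs(nproc))
  sendsz(1)=nelem
  call xmpi_allgather(sendsz,1,recvcounts,comm,ier)
  displs(1)=0
  do ii=2,nproc
    displs(ii)=displs(ii-1)+recvcounts(ii-1)
  end do
  call xmpi_gatherv(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
 end subroutine gather_columns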

ABINIT/xmpi_gatherv_dp3d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp3d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: double precision three-dimensional arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

480 subroutine xmpi_gatherv_dp3d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
481 
482 !Arguments-------------------------
483  real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:)
484  real(dp), DEV_CONTARRD intent(inout)   :: recvbuf(:,:,:)
485  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
486  integer,intent(in) :: nelem,root,comm
487  integer,intent(out) :: ier
488 
489 !Local variables--------------
490  integer :: cc,dd,sz12
491 
492 ! *************************************************************************
493 
494  ier=0
495 #if defined HAVE_MPI
496  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
497    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
498 &   MPI_DOUBLE_PRECISION,root,comm,ier)
499  else if (comm == MPI_COMM_SELF) then
500 #endif
501    sz12=size(xval,1)*size(xval,2)
502    dd=0;if (size(displs)>0) dd=displs(1)/sz12
503    cc=size(xval,3);if (size(recvcounts)>0) cc=recvcounts(1)/sz12
504    recvbuf(:,:,dd+1:dd+cc)=xval(:,:,1:cc)
505 #if defined HAVE_MPI
506  end if
507 #endif
508 
509 end subroutine xmpi_gatherv_dp3d

ABINIT/xmpi_gatherv_dp4d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp4d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: double precision four-dimensional arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

536 subroutine xmpi_gatherv_dp4d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
537 
538 !Arguments-------------------------
539  real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:)
540  real(dp), DEV_CONTARRD intent(inout)   :: recvbuf(:,:,:,:)
541  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
542  integer,intent(in) :: nelem,root,comm
543  integer,intent(out) :: ier
544 
545 !Local variables-------------------
546  integer :: cc,dd,sz123
547 
548 ! *************************************************************************
549 
550  ier=0
551 #if defined HAVE_MPI
552  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
553    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
554 &   MPI_DOUBLE_PRECISION,root,comm,ier)
555  else if (comm == MPI_COMM_SELF) then
556 #endif
557    sz123=size(xval,1)*size(xval,2)*size(xval,3)
558    dd=0;if (size(displs)>0) dd=displs(1)/sz123
559    cc=size(xval,4);if (size(recvcounts)>0) cc=recvcounts(1)/sz123
560    recvbuf(:,:,:,dd+1:dd+cc)=xval(:,:,:,1:cc)
561 #if defined HAVE_MPI
562  end if
563 #endif
564 
565 end subroutine xmpi_gatherv_dp4d

ABINIT/xmpi_gatherv_dp5d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp5d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: double precision five-dimensional arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

592 subroutine xmpi_gatherv_dp5d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
593 
594 !Arguments-------------------------
595  real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:,:)
596  real(dp), DEV_CONTARRD intent(inout)   :: recvbuf(:,:,:,:,:)
597  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
598  integer,intent(in) :: nelem,root,comm
599  integer,intent(out) :: ier
600 
601 !Local variables-------------------
602  integer :: cc,dd,sz1234
603 
604 ! *************************************************************************
605 
606  ier=0
607 #if defined HAVE_MPI
608  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
609    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
610 &   MPI_DOUBLE_PRECISION,root,comm,ier)
611  else if (comm == MPI_COMM_SELF) then
612 #endif
613    sz1234=size(xval,1)*size(xval,2)*size(xval,3)*size(xval,4)
614    dd=0;if (size(displs)>0) dd=displs(1)/sz1234
615    cc=size(xval,5);if (size(recvcounts)>0) cc=recvcounts(1)/sz1234
616    recvbuf(:,:,:,:,dd+1:dd+cc)=xval(:,:,:,:,1:cc)
617 #if defined HAVE_MPI
618  end if
619 #endif
620 
621 end subroutine xmpi_gatherv_dp5d

ABINIT/xmpi_gatherv_dp6d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_dp6d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: double precision six-dimensional arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

648 subroutine xmpi_gatherv_dp6d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
649 
650 !Arguments-------------------------
651  real(dp), DEV_CONTARRD intent(in) :: xval(:,:,:,:,:,:)
652  real(dp), DEV_CONTARRD intent(inout)   :: recvbuf(:,:,:,:,:,:)
653  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
654  integer,intent(in) :: nelem,root,comm
655  integer,intent(out) :: ier
656 
657 !Local variables-------------------
658  integer :: cc,dd,sz12345
659 
660 ! *************************************************************************
661 
662  ier=0
663 #if defined HAVE_MPI
664  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
665    call MPI_gatherV(xval,nelem,MPI_DOUBLE_PRECISION,recvbuf,recvcounts,displs,&
666 &   MPI_DOUBLE_PRECISION,root,comm,ier)
667  else if (comm == MPI_COMM_SELF) then
668 #endif
669    sz12345=size(xval,1)*size(xval,2)*size(xval,3)*size(xval,4)*size(xval,5)
670    dd=0;if (size(displs)>0) dd=displs(1)/sz12345
671    cc=size(xval,6);if (size(recvcounts)>0) cc=recvcounts(1)/sz12345
672    recvbuf(:,:,:,:,:,dd+1:dd+cc)=xval(:,:,:,:,:,1:cc)
673 #if defined HAVE_MPI
674  end if
675 #endif
676 
677 end subroutine xmpi_gatherv_dp6d

ABINIT/xmpi_gatherv_int [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_int

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: one-dimensional integer arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

45 subroutine xmpi_gatherv_int(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
46 
47 !Arguments-------------------------
48  integer, DEV_CONTARRD intent(in) :: xval(:)
49  integer, DEV_CONTARRD intent(inout) :: recvbuf(:)
50  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
51  integer,intent(in) :: nelem,root,comm
52  integer,intent(out) :: ier
53 
54 !Local variables-------------------
55  integer :: cc,dd
56 
57 ! *************************************************************************
58 
59  ier=0
60 #if defined HAVE_MPI
61  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
62    call MPI_gatherV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
63 &   MPI_INTEGER,root,comm,ier)
64  else if (comm == MPI_COMM_SELF) then
65 #endif
66    dd=0;if (size(displs)>0) dd=displs(1)
67    cc=size(xval);if (size(recvcounts)>0) cc=recvcounts(1)
68    recvbuf(dd+1:dd+cc)=xval(1:cc)
69 #if defined HAVE_MPI
70  end if
71 #endif
72 
73 end subroutine xmpi_gatherv_int

ABINIT/xmpi_gatherv_int1_dp1 [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_int1_dp1

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: one-dimensional integer array and one-dimensional double precision array.

INPUTS

  buf_int= integer buffer array to be gathered
  buf_int_size= size of the buf_int array
  buf_dp= double precision buffer array to be gathered
  buf_dp_size= size of the buf_dp array
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  buf_int_all= gathered integer buffer array (allocated by this routine)
  buf_int_size_all= size of the gathered integer buffer
  buf_dp_all= gathered double precision buffer array (allocated by this routine)
  buf_dp_size_all= size of the gathered double precision buffer
  ier= exit status, a non-zero value meaning there is an error

SOURCE

102 subroutine xmpi_gatherv_int1_dp1(buf_int,buf_int_size,buf_dp,buf_dp_size, &
103 &          buf_int_all,buf_int_size_all,buf_dp_all,buf_dp_size_all,root,&
104 &          comm,ier)
105 
106 !Arguments-------------------------
107 !scalars
108  integer,intent(in) :: buf_int_size,buf_dp_size,root,comm
109  integer,intent(out) :: buf_int_size_all,buf_dp_size_all,ier
110 !arrays
111  integer,intent(in) :: buf_int(:)
112  integer,allocatable,target,intent(out) :: buf_int_all(:)
113  real(dp),intent(in) :: buf_dp(:)
114  real(dp),allocatable,target, intent(out) :: buf_dp_all(:)
115 
116 !Local variables--------------
117 !scalars
118  integer :: buf_pack_size,ierr,ii,iproc,istart_dp,istart_int
119  integer :: lg,lg1,lg2,lg_int,lg_dp,me,nproc,position
120  integer :: totalbufcount
121  logical,parameter :: use_pack=.false.
122 !arrays
123  integer :: buf_size(2),pos(3)
124  integer,allocatable :: buf_dp_size1(:),buf_int_size1(:)
125  integer,allocatable :: count_dp(:),count_int(:),count_size(:),counts(:)
126  integer,allocatable :: disp_dp(:),disp_int(:),displ(:),displ_dp(:),displ_int(:)
127  integer,allocatable :: pos_all(:)
128  integer,pointer:: outbuf_int(:)
129  real(dp),pointer :: outbuf_dp(:)
130  character,allocatable :: buf_pack(:),buf_pack_tot(:)
131 
132 ! *************************************************************************
133 
134  ier=0
135 
136 #if defined HAVE_MPI
137  if (comm/=MPI_COMM_SELF.and.comm/=MPI_COMM_NULL) then
138 
139    nproc=xmpi_comm_size(comm)
140 
141 !First version: using 2 gatherv calls (one for integers, one for reals)
142 !------------------------------------------------------------------
143    if (.not.use_pack) then
144 
145 !  Prepare communications
146      ABI_MALLOC(count_int,(nproc))
147      ABI_MALLOC(disp_int,(nproc))
148      ABI_MALLOC(count_dp,(nproc))
149      ABI_MALLOC(disp_dp,(nproc))
150      ABI_MALLOC(count_size,(2*nproc))
151      buf_size(1)=buf_int_size;buf_size(2)=buf_dp_size
152      call xmpi_allgather(buf_size,2, count_size,comm,ier)
153      do iproc=1,nproc
154        count_int(iproc)=count_size(2*iproc-1)
155        count_dp(iproc)=count_size(2*iproc)
156      end do
157      disp_int(1)=0;disp_dp(1)=0
158      do ii=2,nproc
159        disp_int(ii)=disp_int(ii-1)+count_int(ii-1)
160        disp_dp (ii)=disp_dp (ii-1)+count_dp (ii-1)
161      end do
162      buf_int_size_all=sum(count_int)
163      buf_dp_size_all =sum(count_dp)
164      ABI_STAT_MALLOC(buf_int_all,(buf_int_size_all), ier)
165      if (ier/= 0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_gatherv')
166      ABI_STAT_MALLOC(buf_dp_all ,(buf_dp_size_all), ier)
167      if (ier/= 0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_gatherv')
168 
169 !  Communicate (one call for integers, one call for reals)
170      call xmpi_gatherv(buf_int,buf_int_size,buf_int_all,count_int,disp_int,root,comm,ierr)
171      call xmpi_gatherv(buf_dp,buf_dp_size,buf_dp_all,count_dp,disp_dp,root,comm,ierr)
172 
173 !  Release the memory
174      ABI_FREE(count_int)
175      ABI_FREE(disp_int)
176      ABI_FREE(count_dp)
177      ABI_FREE(disp_dp)
178      ABI_FREE(count_size)
179 
180 !2nd version: using 1 gatherv call (with MPI_PACK)
181 !-----------------------------------------------------------------
182    else
183 
184      me=xmpi_comm_rank(comm)
185 
186 !  Compute size of message
187      call MPI_PACK_SIZE(buf_int_size,MPI_INTEGER,comm,lg1,ier)
188      call MPI_PACK_SIZE(buf_dp_size,MPI_DOUBLE_PRECISION,comm,lg2,ier)
189      lg=lg1+lg2
190 
191 !  Pack data to be sent
192      position=0;buf_pack_size=lg1+lg2
193      ABI_STAT_MALLOC(buf_pack,(buf_pack_size), ier)
194      if (ier/= 0) call xmpi_abort(msg='error allocating buf_pack xmpi_gatherv')
195      call MPI_PACK(buf_int,buf_int_size,MPI_INTEGER,buf_pack,buf_pack_size,position,comm,ier)
196      call MPI_PACK(buf_dp,buf_dp_size,MPI_DOUBLE_PRECISION,buf_pack,buf_pack_size,position,comm,ier)
197 
198 !  Gather size of all packed messages
199      ABI_MALLOC(pos_all,(nproc*3))
200      ABI_MALLOC(counts,(nproc))
201      ABI_MALLOC(buf_int_size1,(nproc))
202      ABI_MALLOC(buf_dp_size1,(nproc))
203      ABI_MALLOC(displ,(nproc))
204      ABI_MALLOC(displ_int,(nproc))
205      ABI_MALLOC(displ_dp,(nproc))
206      pos(1)=position;pos(2)=buf_int_size;pos(3)=buf_dp_size
207      call MPI_ALLGATHER(pos,3,MPI_INTEGER,pos_all,3,MPI_INTEGER,comm,ier)
208      ii=1
209      do iproc=1,nproc
210        counts(iproc)=pos_all(ii);ii=ii+1
211        buf_int_size1(iproc)=pos_all(ii);ii=ii+1
212        buf_dp_size1(iproc)=pos_all(ii);ii=ii+1
213      end do
214 
215      displ(1)=0 ; displ_int(1)=0 ; displ_dp(1)=0
216      do iproc=2,nproc
217        displ(iproc)=displ(iproc-1)+counts(iproc-1)
218        displ_int(iproc)=displ_int(iproc-1)+buf_int_size1(iproc-1)
219        displ_dp(iproc)=displ_dp(iproc-1)+buf_dp_size1(iproc-1)
220      end do
221 
222      totalbufcount=displ(nproc)+counts(nproc)
223      ABI_STAT_MALLOC(buf_pack_tot,(totalbufcount), ier)
224      if (ier/= 0) call xmpi_abort(msg='error allocating buf_pack_tot in xmpi_gatherv')
225      buf_int_size_all=sum(buf_int_size1)
226      buf_dp_size_all=sum(buf_dp_size1)
227 
228      if (me==root) then
229        ABI_STAT_MALLOC(buf_int_all,(buf_int_size_all), ier)
230        if (ier/= 0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_gatherv')
231        ABI_STAT_MALLOC(buf_dp_all,(buf_dp_size_all), ier)
232        if (ier/= 0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_gatherv')
233      else
234        ABI_STAT_MALLOC(buf_int_all,(1), ier)
235        if (ier/= 0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_gatherv')
236        ABI_STAT_MALLOC(buf_dp_all,(1), ier)
237        if (ier/= 0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_gatherv')
238      end if
239 
240 !  Gather all packed messages
241      call MPI_GATHERV(buf_pack,position,MPI_PACKED,buf_pack_tot,counts,displ,MPI_PACKED,root,comm,ier)
242      if (me==root) then
243        position=0
244        do iproc=1,nproc
245          lg_int=buf_int_size1(iproc); lg_dp=buf_dp_size1(iproc)
246          istart_int=displ_int(iproc); istart_dp=displ_dp(iproc)
247          outbuf_int=>buf_int_all(istart_int+1:istart_int+lg_int)
248          call MPI_UNPACK(buf_pack_tot,totalbufcount,position, outbuf_int, &
249 &         lg_int, MPI_INTEGER,comm,ier)
250          outbuf_dp=>buf_dp_all(istart_dp+1:istart_dp+lg_dp)
251          call MPI_UNPACK(buf_pack_tot,totalbufcount,position,outbuf_dp, &
252 &         lg_dp, MPI_DOUBLE_PRECISION,comm,ier)
253        end do
254      end if
255 
256 !  Release the memory
257      ABI_FREE(pos_all)
258      ABI_FREE(counts)
259      ABI_FREE(buf_int_size1)
260      ABI_FREE(buf_dp_size1)
261      ABI_FREE(displ)
262      ABI_FREE(displ_int)
263      ABI_FREE(displ_dp)
264      ABI_FREE(buf_pack_tot)
265      ABI_FREE(buf_pack)
266 
267    end if
268  else if (comm == MPI_COMM_SELF) then
269 #endif
270 
271 !Sequential version
272    ABI_STAT_MALLOC(buf_int_all,(buf_int_size), ier)
273    if (ier/= 0) call xmpi_abort(msg='error allocating buf_int_all in xmpi_gatherv')
274    ABI_STAT_MALLOC(buf_dp_all,(buf_dp_size), ier)
275    if (ier/= 0) call xmpi_abort(msg='error allocating buf_dp_all in xmpi_gatherv')
276    buf_int_all(:)=buf_int(:)
277    buf_dp_all(:)=buf_dp(:)
278    buf_int_size_all=buf_int_size
279    buf_dp_size_all=buf_dp_size
280 
281 #if defined HAVE_MPI
282  end if
283 #endif
284 
285 end subroutine xmpi_gatherv_int1_dp1
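
  A possible usage sketch, assuming m_xmpi and the dp kind from defs_basis, and assuming that this
  mixed-buffer variant is reachable through the generic xmpi_gatherv name like the other routines
  in this file. The gathered buffers are allocated by the routine itself, and only root holds the
  full concatenated data. The helper name gather_mixed_example is hypothetical.

 subroutine gather_mixed_example(buf_int,buf_dp,root,comm)
  use defs_basis, only : dp
  use m_xmpi
  implicit none
  integer,intent(in) :: buf_int(:),root,comm
  real(dp),intent(in) :: buf_dp(:)
  integer :: ier,buf_int_size_all,buf_dp_size_all
  integer,allocatable :: buf_int_all(:)
  real(dp),allocatable :: buf_dp_all(:)

! One call gathers both buffers; their sizes may differ from rank to rank
  call xmpi_gatherv(buf_int,size(buf_int),buf_dp,size(buf_dp),&
&   buf_int_all,buf_int_size_all,buf_dp_all,buf_dp_size_all,root,comm,ier)

! On root, buf_int_all(1:buf_int_size_all) and buf_dp_all(1:buf_dp_size_all)
! hold the contributions of all ranks, concatenated in rank order
 end subroutine gather_mixed_example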

ABINIT/xmpi_gatherv_int2d [ Functions ]

[ Top ] [ Functions ]

NAME

  xmpi_gatherv_int2d

FUNCTION

  Gathers data from all tasks onto the root process.
  Target: two-dimensional integer arrays.

INPUTS

  xval= buffer array to be sent
  recvcounts= number of elements received from each process
  displs= relative offsets (in elements) for the incoming data
  nelem= number of elements sent by this process
  root= rank of the receiving process
  comm= MPI communicator

OUTPUT

  ier= exit status, a non-zero value meaning there is an error

SIDE EFFECTS

  recvbuf= received buffer

SOURCE

313 subroutine xmpi_gatherv_int2d(xval,nelem,recvbuf,recvcounts,displs,root,comm,ier)
314 
315 !Arguments-------------------------
316  integer, DEV_CONTARRD intent(in) :: xval(:,:)
317  integer, DEV_CONTARRD intent(inout) :: recvbuf(:,:)
318  integer, DEV_CONTARRD intent(in) :: recvcounts(:),displs(:)
319  integer,intent(in) :: nelem,root,comm
320  integer,intent(out) :: ier
321 
322 !Local variables--------------
323  integer :: cc,dd,sz1
324 
325 ! *************************************************************************
326 
327  ier=0
328 #if defined HAVE_MPI
329  if (comm /= MPI_COMM_SELF .and. comm /= MPI_COMM_NULL) then
330    call MPI_gatherV(xval,nelem,MPI_INTEGER,recvbuf,recvcounts,displs,&
331 &   MPI_INTEGER,root,comm,ier)
332  else if (comm == MPI_COMM_SELF) then
333 #endif
334    sz1=size(xval,1)
335    dd=0;if (size(displs)>0) dd=displs(1)/sz1
336    cc=size(xval,2);if (size(recvcounts)>0) cc=recvcounts(1)/sz1
337    recvbuf(:,dd+1:dd+cc)=xval(:,1:cc)
338 #if defined HAVE_MPI
339  end if
340 #endif
341 
342 end subroutine xmpi_gatherv_int2d