ABINIT/m_paral_pert [ Modules ]

NAME

  m_paral_pert

FUNCTION

  This module provides routines to manage the parallelization/distribution
  of the workload over perturbations.
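
  A typical calling sequence (an illustrative sketch only, not a verbatim excerpt;
  the surrounding loop over perturbations and the variables dtset, mpi_enreg,
  my_natom, old_atmtab, old_comm_atom and the paw_* arrays are assumed to be
  set up by the caller) brackets the treatment of one perturbation with the
  four public routines:

    call set_pert_comm(mpi_enreg,nppert)      ! switch to the per-perturbation communicators
    call set_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                     paw_an,paw_ij,pawfgrtab,pawrhoij)
    ! ... treat the perturbation(s) assigned to this group of processes ...
    call unset_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                       paw_an,paw_ij,pawfgrtab,pawrhoij)
    call unset_pert_comm(mpi_enreg)           ! restore the global communicators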

COPYRIGHT

 Copyright (C) 2013-2024 ABINIT group (MT,FJ,MD)
 This file is distributed under the terms of the
 GNU General Public License, see ~abinit/COPYING
 or http://www.gnu.org/copyleft/gpl.txt .
 For the initials of contributors, see ~abinit/doc/developers/contributors.txt.

SOURCE

18 #if defined HAVE_CONFIG_H
19 #include "config.h"
20 #endif
21 
22 #include "abi_common.h"
23 
24 MODULE m_paral_pert
25 
26  use defs_basis
27  use m_abicore
28  use m_errors
29  use m_xmpi
30  use m_dtset
31 
32  use defs_abitypes, only : MPI_type
33  use m_time,      only : timab
34  use m_copy,      only : deep_copy
35  use m_paw_an,    only : paw_an_type, paw_an_free, paw_an_redistribute
36  use m_paw_ij,    only : paw_ij_type, paw_ij_free, paw_ij_redistribute
37  use m_pawfgrtab, only : pawfgrtab_type, pawfgrtab_free, pawfgrtab_redistribute
38  use m_pawrhoij,  only : pawrhoij_type, pawrhoij_free, pawrhoij_redistribute
39  use m_paral_atom,only : get_atm_proc
40  use m_mpinfo,    only : initmpi_atom
41 
42  implicit none
43 
44  private
45 
46 !public procedures.
47  public :: set_pert_comm
48  public :: unset_pert_comm
49  public :: set_pert_paw
50  public :: unset_pert_paw
51 
52 !private procedures.
53  private :: get_exchatom_list
54  private :: get_exchatom_list1

m_paral_pert/get_exchatom_list [ Functions ]

NAME

 get_exchatom_list

FUNCTION

 This routine determines the list of atoms to be exchanged between the current process and the other processes.
 The initial communicator (mpicomm_in) should be an ancestor of the final communicators (mpicomm_out);
 it is split into "nbgroup" final communicators (mpicomm_out).

INPUTS

  mpicomm_in=input MPI communicator (over atoms) among which atoms are initially distributed
  mpicomm_out=MPI communicator (over atoms) among which we want to distribute the atoms
  my_atmtab_in= indexes of atoms initially treated by the current proc (communicator mpicomm_in)
  my_atmtab_out= indexes of atoms finally treated by the current proc (communicator mpicomm_out)
  natom = total number of atoms
  nbgroup = number of mpicomm_out communicators included in mpicomm_in (determined internally)

OUTPUT

 For the current proc:
 RecvAtomProc(:)= ranks (in mpicomm_in) of the processes from which I expect atoms
 RecvAtomList(:)= indexes of the atoms to be received by me
   RecvAtomList(irecv) is the atom I expect from RecvAtomProc(irecv)
 SendAtomProc(:)= ranks (in mpicomm_in) of the destination processes
 SendAtomList(:)= indexes of the atoms to be sent by me
   SendAtomList(isend) is the atom sent to SendAtomProc(isend)
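
 These lists are meant to be forwarded to the PAW redistribution routines, as done
 in set_pert_paw below. A condensed sketch (adapted from that routine, keeping only
 the pawrhoij case) could read:

    call get_exchatom_list(old_comm_atom,mpi_enreg%comm_atom,old_atmtab,&
&        mpi_enreg%my_atmtab,dtset%natom,&
&        SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
    call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
&        mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
&        natom=dtset%natom,&
&        SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
&        RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
    ABI_FREE(SendAtomProc) ; ABI_FREE(SendAtomList)
    ABI_FREE(RecvAtomProc) ; ABI_FREE(RecvAtomList)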

SOURCE

609  subroutine get_exchatom_list(mpicomm_in,mpicomm_out,my_atmtab_in,my_atmtab_out,natom, &
610 &                             SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
611 
612 !Arguments ---------------------------------------------
613 !scalars
614  integer,intent(in) :: mpicomm_in,mpicomm_out,natom
615 !arrays
616  integer,intent(in) :: my_atmtab_in(:),my_atmtab_out(:)
617  integer,allocatable,intent(out) :: RecvAtomProc(:),RecvAtomList(:)
618  integer,allocatable,intent(out) :: SendAtomList(:),SendAtomProc(:)
619 
620 !Local variables ---------------------------------------
621 !scalars
622  integer :: igroup,i1,ierr,ii,me_in,me_in_out,me_out,me_out0_in
623  integer :: my_natom_in,my_natom_out,nbgroup,nbsend,nproc_in,nproc_max,nproc_out
624 !arrays
625  integer :: buf_int(3),rank0(1),ranks0_out_in(1)
626  integer, allocatable :: buf_int_all(:),group(:),master_commout(:),procs(:),ranks(:)
627  integer, allocatable  :: ranks1(:,:),sizecomm(:)
628 
629 ! *************************************************************************
630 
631  nproc_in=xmpi_comm_size(mpicomm_in)
632  me_in=xmpi_comm_rank(mpicomm_in)
633  my_natom_in=size(my_atmtab_in)
634  nproc_out=xmpi_comm_size(mpicomm_out)
635  me_out=xmpi_comm_rank(mpicomm_out)
636  my_natom_out=size(my_atmtab_out)
637 
638  rank0(1)=0
639  call xmpi_comm_translate_ranks(mpicomm_out,1,rank0(1),mpicomm_in,ranks0_out_in(1))
640  me_out0_in=ranks0_out_in(1)
641 
642  ABI_MALLOC(ranks,(1:nproc_in))
643  ABI_MALLOC(sizecomm,(1:nproc_in))
644  ABI_MALLOC(master_commout,(1:nproc_in))
645  ABI_MALLOC(group,(0:nproc_in-1))
646  buf_int(1)=me_out; buf_int(2)=me_out0_in; buf_int(3)=nproc_out;
647 
648  ABI_MALLOC(buf_int_all,(3*nproc_in))
649  call xmpi_allgather(buf_int,3,buf_int_all,mpicomm_in,ierr)
650  nbgroup=0;
651  nproc_max=0
652  ranks(:)=-1;group(:)=-1;sizecomm(:)=-1;master_commout(:)=-1
653 
654  do ii=1,nproc_in
655    ranks(ii)=buf_int_all(3*ii-2)    !me_out
656    master_commout(ii)=buf_int_all(3*ii-1) !rank of me_out=0 of mpicomm_out expressed in mpicomm_in
657 
658    if (ranks(ii)==0) then
659      nbgroup=nbgroup+1
660      sizecomm(nbgroup)=buf_int_all(3*ii) !nproc_out
661      group(master_commout(ii))=nbgroup
662      if (sizecomm(nbgroup)>nproc_max) nproc_max=sizecomm(nbgroup)
663    end if
664 
665  enddo
666  ABI_FREE(buf_int_all)
667 
668 
669  ABI_MALLOC(ranks1,(nbgroup,0:nproc_max-1))
670  ranks1(:,:)=-1
671  do ii=1,nproc_in
672    me_in_out=ranks(ii)
673    igroup=group(master_commout(ii))
674    ranks1(igroup,me_in_out)=ii-1 !numbering of procs
675  enddo
676 
677 !Send
678  nbsend=0
679  if (my_natom_in>0) then
680    ABI_MALLOC(SendAtomList,(my_natom_in*nbgroup))
681    ABI_MALLOC(SendAtomProc,(my_natom_in*nbgroup))
682    ABI_MALLOC(procs,(my_natom_in))
683    do igroup=1,nbgroup
684      call get_atm_proc(my_atmtab_in,natom,sizecomm(igroup),procs)
685      do i1=1,my_natom_in
686        nbsend=nbsend+1
687        SendAtomProc(nbsend)=ranks1(igroup,procs(i1))
688        SendAtomList(nbsend)=my_atmtab_in(i1)
689      end do
690    end do
691    ABI_FREE(procs)
692  else
693    ABI_MALLOC(SendAtomList,(0))
694    ABI_MALLOC(SendAtomProc,(0))
695  end if
696 
697 !Recv
698  if (my_natom_out>0) then !no early return above because of xmpi_allgather and the send operations
699    ABI_MALLOC(RecvAtomProc,(my_natom_out))
700    ABI_MALLOC(RecvAtomList,(my_natom_out))
701    RecvAtomList(:)=my_atmtab_out(:)
702 !The atoms are put in increasing order (see get_my_atmtab), so the procs are sorted in increasing rank order
703    call get_atm_proc(RecvAtomList,natom,nproc_in,RecvAtomProc)
704  else
705    ABI_MALLOC(RecvAtomList,(0))
706    ABI_MALLOC(RecvAtomProc,(0))
707  end if
708 
709  ABI_FREE(master_commout)
710  ABI_FREE(ranks1)
711  ABI_FREE(group)
712  ABI_FREE(ranks)
713  ABI_FREE(sizecomm)
714 
715 end subroutine get_exchatom_list

m_paral_pert/get_exchatom_list1 [ Functions ]

NAME

 get_exchatom_list1

FUNCTION

 This routine determines the list of atoms to be exchanged between the current process and the other processes.
 It redistributes the atoms of one mpicomm_in among mpicomm_out.

INPUTS

  mpicomm_in=input MPI communicator (over atoms) among which atoms are initially distributed
  mpicomm_out=MPI communicator (over atoms) among which we want to distribute atoms
  my_atmtab_in= indexes of atoms initially treated by the current proc (communicator mpicomm_in)
  my_atmtab_out= indexes of atoms finally treated by the current proc (communicator mpicomm_out)
  natom = total number of atoms

OUTPUT

 For the current proc:
 RecvAtomProc(:)= ranks (in mpicomm_out) of the processes from which I expect atoms
 RecvAtomList(:)= indexes of the atoms to be received by me
   RecvAtomList(irecv) is the atom I expect from RecvAtomProc(irecv)
 SendAtomProc(:)= ranks (in mpicomm_out) of the destination processes
 SendAtomList(:)= indexes of the atoms to be sent by me
   SendAtomList(isend) is the atom sent to SendAtomProc(isend)

NOTES

 Previously, mpicomm_out has been split into several mpicomm_in communicators.
 For our purpose, we only need to redistribute the atoms of one mpicomm_in among mpicomm_out,
 because all the atomic data structures to be exchanged have the same values in each mpicomm_in.
 The mpicomm_in we choose is the one to which processor 0 of mpicomm_out belongs.
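
 A condensed usage sketch (adapted from unset_pert_paw below, keeping only the
 pawrhoij case), going back from the perturbation-level atom communicator to the
 previously saved one:

    call get_exchatom_list1(mpi_enreg%comm_atom,old_comm_atom,mpi_enreg%my_atmtab,&
&        old_atmtab,dtset%natom,&
&        SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
    call pawrhoij_redistribute(pawrhoij,mpi_enreg%comm_atom,old_comm_atom,&
&        mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
&        natom=dtset%natom,&
&        SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
&        RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)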

SOURCE

752 subroutine get_exchatom_list1(mpicomm_in,mpicomm_out,my_atmtab_in,my_atmtab_out,natom, &
753 &                             SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
754 
755 !Arguments ---------------------------------------------
756 !scalars
757  integer,intent(in) :: mpicomm_in,mpicomm_out,natom
758 !arrays
759  integer,intent(in) :: my_atmtab_in(:),my_atmtab_out(:)
760  integer,allocatable,intent(out) :: RecvAtomProc(:),RecvAtomList(:)
761  integer,allocatable,intent(out) :: SendAtomList(:),SendAtomProc(:)
762 
763 !Local variables ---------------------------------------
764 !scalars
765  integer :: i1,ier,me_out,my_natom_in,my_natom_out,nproc_in,nproc_out
766  logical :: sender
767 !arrays
768  integer,allocatable :: ranks_in(:),ranks_in_out(:)
769 
770 ! *************************************************************************
771 
772  me_out=xmpi_comm_rank(mpicomm_out)
773  my_natom_in=size(my_atmtab_in)
774  nproc_out=xmpi_comm_size(mpicomm_out)
775  my_natom_out=size(my_atmtab_out)
776  nproc_in=xmpi_comm_size(mpicomm_in)
777 
778  call xmpi_bcast(nproc_in,0,mpicomm_out,ier)
779  ABI_MALLOC(ranks_in_out,(0:nproc_in-1))
780 
781 !All the atoms are distributed within each mpicomm_in;
782 !we only redistribute the atoms of one mpicomm_in among mpicomm_out
783 
784 !Look for the communicator mpicomm_in to which me_out=0 belongs
785 !Get ranks of all processors of mpicomm_in expressed in mpicomm_out
786  if (me_out==0) then
787    ABI_MALLOC(ranks_in,(0:nproc_in-1))
788    ranks_in=(/ (i1,i1=0,nproc_in-1 )/)
789    call xmpi_comm_translate_ranks(mpicomm_in,nproc_in,ranks_in,mpicomm_out,ranks_in_out)
790    ABI_FREE(ranks_in)
791  end if
792  call xmpi_bcast(ranks_in_out,0,mpicomm_out,ier)
793 
794 !Check whether me_out is one of the sending procs
795 !(i.e. belongs to the mpicomm_in to which me_out=0 belongs)
796  sender = .false.
797  do i1=0,nproc_in-1
798    if (me_out==ranks_in_out(i1)) then
799      sender = .true.
800      exit
801    end if
802  end do
803 
804 !Send
805  if (my_natom_in>0.and.sender) then
806    ABI_MALLOC(SendAtomList,(my_natom_in))
807    ABI_MALLOC(SendAtomProc,(my_natom_in))
808    SendAtomList(:)=my_atmtab_in(:)
809 !  The atoms are put in increasing order (see get_my_atmtab),
810 !  so the procs are sorted in increasing rank order
811    call get_atm_proc(SendAtomList,natom,nproc_out,SendAtomProc)
812  else
813    ABI_MALLOC(SendAtomList,(0))
814    ABI_MALLOC(SendAtomProc,(0))
815  end if
816 
817 !Recv
818  if (my_natom_out>0) then
819    ABI_MALLOC(RecvAtomProc,(my_natom_out))
820    ABI_MALLOC(RecvAtomList,(my_natom_out))
821    RecvAtomList(:)=my_atmtab_out(:)
822 !  The atoms are put in increasing order (see get_my_atmtab),
823 !  so the procs are sorted in increasing rank order
824    call get_atm_proc(RecvAtomList,natom,nproc_in,RecvAtomProc)
825    RecvAtomProc(:)=ranks_in_out(RecvAtomProc(:))
826  else
827    ABI_MALLOC(RecvAtomList,(0))
828    ABI_MALLOC(RecvAtomProc,(0))
829  end if
830 
831  ABI_FREE(ranks_in_out)
832 
833 end subroutine get_exchatom_list1

m_paral_pert/set_pert_comm [ Functions ]

NAME

 set_pert_comm

FUNCTION

 Set the MPI communicators over the perturbed cell

INPUTS

  nppert=number of processes for the parallelization over perturbations

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
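
 As a concrete illustration (the figures are hypothetical): with mpi_enreg%nproc=12
 processes and nppert=3, the call below leaves each perturbation group with
 nproc_cell=nproc/nppert=4 processes, while comm_kpt, comm_kptband and comm_wvl are
 all aliased to the perturbation-level cell communicator:

    call set_pert_comm(mpi_enreg,3)
    ! now mpi_enreg%comm_cell  == mpi_enreg%comm_cell_pert
    ! and mpi_enreg%nproc_cell == mpi_enreg%nproc/3 == 4

 The matching unset_pert_comm routine (documented further below) restores the
 global communicators.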

SOURCE

77 subroutine set_pert_comm(mpi_enreg,nppert)
78 
79 !Arguments ---------------------------------------------
80 !scalars
81  integer,intent(in) :: nppert
82  type(MPI_type), intent(inout) :: mpi_enreg
83 
84 ! *************************************************************************
85 
86  if (mpi_enreg%paral_pert==0) return
87 
88 !New communicators for the perturbed cell
89  mpi_enreg%comm_cell =mpi_enreg%comm_cell_pert
90  mpi_enreg%me_cell   =xmpi_comm_rank(mpi_enreg%comm_cell)
91  mpi_enreg%nproc_cell=mpi_enreg%nproc/nppert
92 
93 !Adjust other communicators
94  mpi_enreg%comm_kpt    =mpi_enreg%comm_cell
95  mpi_enreg%me_kpt      =mpi_enreg%me_cell
96  mpi_enreg%comm_kptband=mpi_enreg%comm_cell
97  mpi_enreg%comm_wvl    =mpi_enreg%comm_cell
98 
99 end subroutine set_pert_comm

m_paral_pert/set_pert_paw [ Functions ]

NAME

 set_pert_paw

FUNCTION

 Set PAW data for the parallelization over perturbations:
  Set MPI communicator over atomic sites
  Redistribute PAW on-site data

INPUTS

  dtset <type(dataset_type)>=all input variables for this dataset

OUTPUT

  paw_an_out(my_natom)<type(paw_an_type)>= --optional--
              redistributed PAW arrays given on angular mesh
              if not present, paw_an is redistributed "in-place"
  paw_ij_out(my_natom)<type(paw_ij_type)>= --optional--
              redistributed PAW arrays given on (i,j) channels
              if not present, paw_ij is redistributed "in-place"
  pawfgrtab_out(my_natom)<type(pawfgrtab_type)>= --optional--
              redistributed PAW atomic data given on fine grid
              if not present, pawfgrtab is redistributed "in-place"
  pawrhoij_out(my_natom)<type(pawrhoij_type)>= --optional--
              redistributed PAW rhoij occupancies
              if not present, pawrhoij is redistributed "in-place"
  old_atmtab=saved indexes of the atoms treated by the current proc before redistribution
  old_comm_atom=saved identifier of the previous MPI communicator over atoms

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor
  paw_an(my_natom)<type(paw_an_type)>=PAW arrays given on angular mesh
  paw_ij(my_natom)<type(paw_ij_type)>=PAW arrays given on (i,j) channels
  pawfgrtab(my_natom)<type(pawfgrtab_type)>=PAW atomic data given on fine grid
  pawrhoij(my_natom)<type(pawrhoij_type)>=PAW rhoij occupancies
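
 A sketch of an out-of-place call (hypothetical caller code; the pointer name
 pawrhoij_pert is an assumption), requesting a redistributed copy of pawrhoij while
 the other data are redistributed in place; the same pointer is expected to be
 handed back to unset_pert_paw afterwards:

    type(pawrhoij_type),pointer :: pawrhoij_pert(:)

    call set_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                     paw_an,paw_ij,pawfgrtab,pawrhoij,pawrhoij_out=pawrhoij_pert)
    ! pawrhoij_pert now points to the redistributed occupancies, or directly to
    ! pawrhoij when no redistribution over atomic sites was needed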

SOURCE

180 subroutine set_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
181 &                       paw_an,paw_ij,pawfgrtab,pawrhoij,&
182 &                       paw_an_out,paw_ij_out,pawfgrtab_out,pawrhoij_out)
183 
184 !Arguments ---------------------------------------------
185 !scalars
186  integer,intent(inout) :: my_natom
187  integer,intent(inout) :: old_comm_atom
188  type(dataset_type), intent(in) :: dtset
189  type(MPI_type), intent(inout) :: mpi_enreg
190 !arrays
191  integer,pointer,intent(out) :: old_atmtab(:)
192  type(paw_ij_type),allocatable,target,intent(inout) :: paw_ij(:)
193  type(paw_ij_type),optional,pointer,intent(inout) :: paw_ij_out(:)
194  type(paw_an_type),allocatable,target,intent(inout) :: paw_an(:)
195  type(paw_an_type),optional,pointer,intent(inout) :: paw_an_out(:)
196  type(pawfgrtab_type),allocatable,target,intent(inout) :: pawfgrtab(:)
197  type(pawfgrtab_type),optional,pointer,intent(inout) :: pawfgrtab_out(:)
198  type(pawrhoij_type),allocatable,target,intent(inout) :: pawrhoij(:)
199  type(pawrhoij_type),optional,pointer,intent(inout) :: pawrhoij_out(:)
200 
201 !Local variables ---------------------------------------
202 !scalars
203 !Type of algo used for communications: 1-brute force, 2-asynchronous
204  integer,parameter :: algo_option=2
205  logical :: paral_atom
206 !arrays
207  integer,allocatable :: SendAtomProc(:), SendAtomList(:)
208  integer,allocatable :: RecvAtomProc(:), RecvAtomList(:)
209  real(dp) :: tsec(2)
210 
211 ! *************************************************************************
212 
213  call timab(593,1,tsec)
214 
215  paral_atom=(dtset%natom/=my_natom)
216 
217 !Default value when parallelization over atomic sites is not activated
218  if ((mpi_enreg%paral_pert==0).or.(.not.paral_atom).or.(dtset%usepaw==0)) then
219    old_comm_atom=mpi_enreg%comm_atom
220    call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
221    if (present(pawrhoij_out))  pawrhoij_out=>pawrhoij
222    if (present(paw_ij_out))    paw_ij_out=>paw_ij
223    if (present(paw_an_out))    paw_an_out=>paw_an
224    if (present(pawfgrtab_out)) pawfgrtab_out=>pawfgrtab
225    return
226  end if
227 
228 !Redefine communicator for atoms and store old communicator
229 
230  old_comm_atom=mpi_enreg%comm_atom
231  call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
232  call initmpi_atom(dtset,mpi_enreg)
233  my_natom=mpi_enreg%my_natom
234 
235 !If "asynchronous algo", determine lists of atoms to be exchanged
236 !  between me and other processes
237  if (algo_option==2) then
238    call timab(594,1,tsec)
239    call get_exchatom_list(old_comm_atom,mpi_enreg%comm_atom,old_atmtab,&
240 &       mpi_enreg%my_atmtab,dtset%natom,SendAtomProc,&
241 &       SendAtomList,RecvAtomProc,RecvAtomList)
242    call timab(594,2,tsec)
243  end if
244 
245 !Redistribute PAW on-site data
246 
247 !pawrhoij datastructure
248  call timab(595,1,tsec)
249  if (present(pawrhoij_out)) then
250    nullify(pawrhoij_out)
251    if (algo_option==1) then
252      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
253 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
254 &         natom=dtset%natom,pawrhoij_out=pawrhoij_out)
255    else if (algo_option==2) then
256      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
257 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
258 &         natom=dtset%natom,pawrhoij_out=pawrhoij_out, &
259 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
260 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
261    end if
262  else
263    if (algo_option==1) then
264      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
265 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
266 &         natom=dtset%natom)
267    else if (algo_option==2) then
268      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
269 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
270 &         natom=dtset%natom,&
271 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
272 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
273    end if
274  end if
275  call timab(595,2,tsec)
276 
277 !paw_ij datastructure
278  call timab(596,1,tsec)
279  if (present(paw_ij_out)) then
280    nullify(paw_ij_out)
281    if (algo_option==1) then
282      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
283 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
284 &         natom=dtset%natom,paw_ij_out=paw_ij_out)
285    else if (algo_option==2) then
286      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
287 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
288 &         natom=dtset%natom,paw_ij_out=paw_ij_out,&
289 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
290 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
291    end if
292  else
293    if (algo_option==1) then
294      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
295 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
296 &         natom=dtset%natom)
297    else if (algo_option==2) then
298      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
299 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
300 &         natom=dtset%natom,&
301 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
302 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
303    end if
304  end if
305  call timab(596,2,tsec)
306 
307 !paw_an datastructure
308  call timab(597,1,tsec)
309  if (present(paw_an_out)) then
310    nullify(paw_an_out)
311    if (algo_option==1) then
312      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
313 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
314 &         natom=dtset%natom,paw_an_out=paw_an_out)
315    else if (algo_option==2) then
316      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
317 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
318 &         natom=dtset%natom,paw_an_out=paw_an_out,&
319 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
320 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
321    end if
322  else
323    if (algo_option==1) then
324      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
325 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
326 &         natom=dtset%natom)
327    else if (algo_option==2) then
328      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
329 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
330 &         natom=dtset%natom,&
331 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
332 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
333    end if
334  end if
335  call timab(597,2,tsec)
336 
337 !pawfgrtab datastructure
338  call timab(598,1,tsec)
339  if (present(pawfgrtab_out)) then
340    nullify(pawfgrtab_out)
341    if (algo_option==1) then
342      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
343 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
344 &         natom=dtset%natom,pawfgrtab_out=pawfgrtab_out)
345    else if (algo_option==2) then
346      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
347 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
348 &         natom=dtset%natom,pawfgrtab_out=pawfgrtab_out,&
349 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
350 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
351    end if
352  else
353    if (algo_option==1) then
354      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
355 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
356 &         natom=dtset%natom)
357    else if (algo_option==2) then
358      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
359 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
360 &         natom=dtset%natom,&
361 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
362 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
363    end if
364  end if
365  call timab(598,2,tsec)
366 
367  if (algo_option==2) then
368    ABI_FREE(SendAtomProc)
369    ABI_FREE(SendAtomList)
370    ABI_FREE(RecvAtomProc)
371    ABI_FREE(RecvAtomList)
372  end if
373 
374  call timab(593,2,tsec)
375 
376 end  subroutine set_pert_paw

m_paral_pert/unset_pert_comm [ Functions ]

NAME

 unset_pert_comm

FUNCTION

 Unset the MPI communicators over the perturbed cell; restore the global communicators.

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization

SOURCE

116 subroutine unset_pert_comm(mpi_enreg)
117 
118 !Arguments ---------------------------------------------
119 !scalars
120  type(MPI_type), intent(inout) :: mpi_enreg
121 
122 ! *************************************************************************
123 
124  if (mpi_enreg%paral_pert==0) return
125 
126 !New communicators for the cell
127  mpi_enreg%comm_cell =mpi_enreg%comm_world
128  mpi_enreg%nproc_cell=mpi_enreg%nproc
129  mpi_enreg%me_cell   =mpi_enreg%me
130 
131 !Adjust other communicators
132  mpi_enreg%comm_kpt    =mpi_enreg%comm_cell
133  mpi_enreg%me_kpt      =mpi_enreg%me_cell
134  mpi_enreg%comm_kptband=mpi_enreg%comm_cell
135  mpi_enreg%comm_wvl    =mpi_enreg%comm_cell
136 
137 end  subroutine unset_pert_comm

m_paral_pert/unset_pert_paw [ Functions ]

NAME

 unset_pert_paw

FUNCTION

 Unset PAW data after the parallelization over perturbations:
  Restore MPI communicator over atomic sites
  Restore PAW on-site data

INPUTS

  dtset <type(dataset_type)>=all input variables for this dataset
  old_atmtab=indexes of atoms to restore
  old_comm_atom=MPI communicator to restore
  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor

OUTPUT

  paw_an_out(my_natom)<type(paw_an_type)>= --optional--
              redistributed PAW arrays given on angular mesh
              if not present, paw_an is restored "in-place"
  paw_ij_out(my_natom)<type(paw_ij_type)>= --optional--
              redistributed PAW arrays given on (i,j) channels
              if not present, paw_ij is restored "in-place"
  pawfgrtab_out(my_natom)<type(pawfgrtab_type)>= --optional--
              redistributed PAW atomic data given on fine grid
              if not present, pawfgrtab is restored "in-place"
  pawrhoij_out(my_natom)<type(pawrhoij_type)>= --optional--
              redistributed PAW rhoij occupancies
              if not present, pawrhoij is restored "in-place"
 old_atmtab=on output, saved indexes of the atoms treated by the current proc
 old_comm_atom=on output, saved identifier of the MPI communicator over atoms

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor
  paw_an(my_natom)<type(paw_an_type)>=PAW arrays given on angular mesh
  paw_ij(my_natom)<type(paw_ij_type)>=PAW arrays given on (i,j) channels
  pawfgrtab(my_natom)<type(pawfgrtab_type)>=PAW atomic data given on fine grid
  pawrhoij(my_natom)<type(pawrhoij_type)>=PAW rhoij occupancies
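
 The matching restore call, sketched for the same hypothetical out-of-place
 pawrhoij_pert pointer introduced with set_pert_paw:

    call unset_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                       paw_an,paw_ij,pawfgrtab,pawrhoij,pawrhoij_out=pawrhoij_pert)
    ! pawrhoij_pert has been freed here (out-of-place storage); in the in-place
    ! case (no pawrhoij_out argument), pawrhoij itself is redistributed back onto
    ! the restored atom distribution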

SOURCE

423 subroutine unset_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
424 &                       paw_an,paw_ij,pawfgrtab,pawrhoij,&
425 &                       paw_an_out,paw_ij_out,pawfgrtab_out,pawrhoij_out)
426 
427 !Arguments ---------------------------------------------
428 !scalars
429  integer,intent(inout) :: my_natom
430  integer,intent(inout) :: old_comm_atom
431  type(dataset_type), intent(in) :: dtset
432  type(MPI_type), intent(inout) :: mpi_enreg
433 !arrays
434  integer,pointer,intent(inout) :: old_atmtab(:)
435  type(paw_ij_type),allocatable,target,intent(inout) :: paw_ij(:)
436  type(paw_ij_type),optional,pointer,intent(inout) :: paw_ij_out(:)
437  type(paw_an_type),allocatable,target,intent(inout) :: paw_an(:)
438  type(paw_an_type),optional,pointer,intent(inout) :: paw_an_out(:)
439  type(pawfgrtab_type),allocatable,target,intent(inout) :: pawfgrtab(:)
440  type(pawfgrtab_type),optional,pointer,intent(inout) :: pawfgrtab_out(:)
441  type(pawrhoij_type),allocatable,target,intent(inout) :: pawrhoij(:)
442  type(pawrhoij_type),optional,pointer,intent(inout) :: pawrhoij_out(:)
443 
444 !Local variables ---------------------------------------
445 !scalars
446 !Type of algo used for communications: 1-brute force, 2-asynchronous
447  integer,parameter :: algo_option=2
448  integer :: my_natom_old
449  logical :: exchange,paral_atom
450 !arrays
451  integer,allocatable :: SendAtomProc(:), SendAtomList(:)
452  integer,allocatable :: RecvAtomProc(:), RecvAtomList(:)
453  real(dp) :: tsec(2)
454 
455 ! *************************************************************************
456 
457 !Nothing to do when parallelization over atomic sites is not activated
458  if ((mpi_enreg%paral_pert==0).or.(dtset%usepaw==0)) return
459  if (.not.associated(old_atmtab)) return
460  paral_atom=(dtset%natom/=size(old_atmtab))
461  if (.not.paral_atom) return
462 
463 !If "asynchronous algo", determine lists of atoms to be exchanged
464 !  between me and other processes
465  exchange=.true.
466  if (present(pawrhoij_out).and.present(paw_ij_out).and. &
467 &    present(paw_an_out)  .and.present(pawfgrtab_out)) exchange=.false.
468  if (algo_option==2.and.exchange) then
469    call timab(594,1,tsec)
470    my_natom_old=size(old_atmtab)
471    call get_exchatom_list1(mpi_enreg%comm_atom,old_comm_atom,mpi_enreg%my_atmtab,&
472 &       old_atmtab,dtset%natom,SendAtomProc,&
473 &       SendAtomList,RecvAtomProc,RecvAtomList)
474    call timab(594,2,tsec)
475  end if
476 
477 !Redistribute PAW on-site data (if in-place storage)
478 !or destroy them (out-of-place storage)
479 
480 !pawrhoij datastructure
481  call timab(595,1,tsec)
482  if (present(pawrhoij_out)) then
483    call pawrhoij_free(pawrhoij_out)
484    ABI_FREE(pawrhoij_out)
485  else
486    if (algo_option==1) then
487      call pawrhoij_redistribute(pawrhoij,mpi_enreg%comm_atom,old_comm_atom,&
488 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
489 &         natom=dtset%natom)
490    else if (algo_option==2) then
491      call pawrhoij_redistribute(pawrhoij,mpi_enreg%comm_atom,old_comm_atom,&
492 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
493 &         natom=dtset%natom,&
494 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
495 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
496    end if
497  end if
498  call timab(595,2,tsec)
499 
500 !paw_ij datastructure
501  call timab(596,1,tsec)
502  if (present(paw_ij_out)) then
503    call paw_ij_free(paw_ij_out)
504    ABI_FREE(paw_ij_out)
505  else
506    if (algo_option==1) then
507      call paw_ij_redistribute(paw_ij,mpi_enreg%comm_atom,old_comm_atom,&
508 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
509 &         natom=dtset%natom)
510    else if (algo_option==2) then
511      call paw_ij_redistribute(paw_ij,mpi_enreg%comm_atom,old_comm_atom,&
512 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
513 &         natom=dtset%natom,&
514 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
515 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
516    end if
517  end if
518  call timab(596,2,tsec)
519 
520 !paw_an datastructure
521  call timab(597,1,tsec)
522  if (present(paw_an_out)) then
523    call paw_an_free(paw_an_out)
524    ABI_FREE(paw_an_out)
525  else
526    if (algo_option==1) then
527      call paw_an_redistribute(paw_an,mpi_enreg%comm_atom,old_comm_atom,&
528 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
529 &         natom=dtset%natom)
530    else if (algo_option==2) then
531      call paw_an_redistribute(paw_an,mpi_enreg%comm_atom,old_comm_atom,&
532 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
533 &         natom=dtset%natom,&
534 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
535 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
536    end if
537  end if
538  call timab(597,2,tsec)
539 
540 !pawfgrtab datastructure
541  call timab(598,1,tsec)
542  if (present(pawfgrtab_out)) then
543    call pawfgrtab_free(pawfgrtab_out)
544    ABI_FREE(pawfgrtab_out)
545  else
546    if (algo_option==1) then
547      call pawfgrtab_redistribute(pawfgrtab,mpi_enreg%comm_atom,old_comm_atom,&
548 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
549 &         natom=dtset%natom)
550    else if (algo_option==2) then
551      call pawfgrtab_redistribute(pawfgrtab,mpi_enreg%comm_atom,old_comm_atom,&
552 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
553 &         natom=dtset%natom,&
554 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
555 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
556    end if
557  end if
558  call timab(598,2,tsec)
559 
560 !Release some memory
561  if (algo_option==2.and.exchange) then
562    ABI_FREE(SendAtomProc)
563    ABI_FREE(SendAtomList)
564    ABI_FREE(RecvAtomProc)
565    ABI_FREE(RecvAtomList)
566  end if
567 
568 !Restore communicator for atoms
569  ABI_FREE(old_atmtab)
570  old_comm_atom=mpi_enreg%comm_atom
571  call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
572  call initmpi_atom(dtset,mpi_enreg)
573  my_natom=mpi_enreg%my_natom
574 
575 end  subroutine unset_pert_paw