ABINIT/m_paral_pert [ Modules ]


NAME

  m_paral_pert

FUNCTION

  This module provides routines to manage the parallelization/distribution
  over perturbations

COPYRIGHT

 Copyright (C) 2013-2018 ABINIT group (MT,FJ,MD)
 This file is distributed under the terms of the
 GNU General Public License, see ~abinit/COPYING
 or http://www.gnu.org/copyleft/gpl.txt .
 For the initials of contributors, see ~abinit/doc/developers/contributors.txt.

PARENTS

CHILDREN

SOURCE

23 #if defined HAVE_CONFIG_H
24 #include "config.h"
25 #endif
26 
27 #include "abi_common.h"
28 
29 MODULE m_paral_pert
30 
31  use defs_basis
32  use defs_abitypes
33  use m_abicore
34  use m_errors
35  use m_xmpi
36 
37  use m_time,      only : timab
38  use m_copy,      only : deep_copy
39  use m_paw_an,    only : paw_an_type, paw_an_free, paw_an_redistribute
40  use m_paw_ij,    only : paw_ij_type, paw_ij_free, paw_ij_redistribute
41  use m_pawfgrtab, only : pawfgrtab_type, pawfgrtab_free, pawfgrtab_redistribute
42  use m_pawrhoij,  only : pawrhoij_type, pawrhoij_free, pawrhoij_redistribute
43  use m_paral_atom,only : get_atm_proc
44  use m_mpinfo,    only : initmpi_atom
45 
46  implicit none
47 
48  private
49 
50 !public procedures.
51  public :: set_pert_comm
52  public :: unset_pert_comm
53  public :: set_pert_paw
54  public :: unset_pert_paw
55 
56 !private procedures.
57  private :: get_exchatom_list
58  private :: get_exchatom_list1

m_paral_pert/get_exchatom_list [ Functions ]


NAME

 get_exchatom_list

FUNCTION

 This routine determines the list of atoms to be exchanged between the current process and the other processes.
 The initial communicator (mpicomm_in), among which the atoms are initially distributed, should be an ancestor
 of the final communicators: it is split into "nbgroup" final communicators (mpicomm_out), where nbgroup
 (the number of mpicomm_out communicators included in mpicomm_in) is determined internally.

INPUTS

  mpicomm_in=input MPI communicator (over atoms) among which atoms are initially distributed
  mpicomm_out=MPI communicator (over atoms) among which we want to distribute atoms
  my_atmtab_in= indexes of the atoms initially treated by the current proc (communicator mpicomm_in)
  my_atmtab_out= indexes of the atoms finally treated by the current proc (communicator mpicomm_out)
  natom = total number of atoms

OUTPUT

 For the current proc:
 RecvAtomProc(:)= ranks of the processes (in mpicomm_in) from which I expect atoms
 RecvAtomList(:)= indexes of the atoms to be received by me
   RecvAtomList(irecv) are the atoms I expect from RecvAtomProc(irecv)
 SendAtomProc(:)= ranks of the destination processes (in mpicomm_in) of the atoms I send
 SendAtomList(:)= indexes of the atoms to be sent by me
   SendAtomList(isend) are the atoms sent to SendAtomProc(isend)
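
 Below is a minimal, self-contained sketch (plain MPI rather than ABINIT's xmpi_ wrappers; the
 number of groups and the split criterion are hypothetical) of the communicator layout this
 routine works with: mpicomm_in is split into nbgroup mpicomm_out communicators, and every rank
 of mpicomm_in advertises its rank and the size of its mpicomm_out, much as get_exchatom_list
 does with xmpi_allgather.

 program split_and_gather
   use mpi
   implicit none
   integer :: comm_in,comm_out,me_in,nproc_in,me_out,nproc_out
   integer :: nbgroup,color,ierr,ii
   integer :: buf(2)
   integer,allocatable :: buf_all(:)

   call MPI_Init(ierr)
   comm_in=MPI_COMM_WORLD
   call MPI_Comm_rank(comm_in,me_in,ierr)
   call MPI_Comm_size(comm_in,nproc_in,ierr)

  !Split comm_in into nbgroup sub-communicators (hypothetical group count and criterion)
   nbgroup=2
   color=mod(me_in,nbgroup)
   call MPI_Comm_split(comm_in,color,me_in,comm_out,ierr)
   call MPI_Comm_rank(comm_out,me_out,ierr)
   call MPI_Comm_size(comm_out,nproc_out,ierr)

  !Each rank of comm_in advertises its rank and the size of its comm_out;
  !the ranks with me_out==0 identify the nbgroup group masters
   buf(1)=me_out ; buf(2)=nproc_out
   allocate(buf_all(2*nproc_in))
   call MPI_Allgather(buf,2,MPI_INTEGER,buf_all,2,MPI_INTEGER,comm_in,ierr)
   if (me_in==0) then
     do ii=1,nproc_in
       write(*,'(3(a,i0))') ' rank ',ii-1,': me_out=',buf_all(2*ii-1),' nproc_out=',buf_all(2*ii)
     end do
   end if

   deallocate(buf_all)
   call MPI_Comm_free(comm_out,ierr)
   call MPI_Finalize(ierr)
 end program split_and_gather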

PARENTS

      m_paral_pert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

679  subroutine get_exchatom_list(mpicomm_in,mpicomm_out,my_atmtab_in,my_atmtab_out,natom, &
680 &                             SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
681 
682 
683 !This section has been created automatically by the script Abilint (TD).
684 !Do not modify the following lines by hand.
685 #undef ABI_FUNC
686 #define ABI_FUNC 'get_exchatom_list'
687 !End of the abilint section
688 
689  implicit none
690 
691 !Arguments ---------------------------------------------
692 !scalars
693  integer,intent(in) :: mpicomm_in,mpicomm_out,natom
694 !arrays
695  integer,intent(in) :: my_atmtab_in(:),my_atmtab_out(:)
696  integer,allocatable,intent(out) :: RecvAtomProc(:),RecvAtomList(:)
697  integer,allocatable,intent(out) :: SendAtomList(:),SendAtomProc(:)
698 
699 !Local variables ---------------------------------------
700 !scalars
701  integer :: igroup,i1,ierr,ii,me_in,me_in_out,me_out,me_out0_in
702  integer :: my_natom_in,my_natom_out,nbgroup,nbsend,nproc_in,nproc_max,nproc_out
703 !arrays
704  integer :: buf_int(3),rank0(1),ranks0_out_in(1)
705  integer, allocatable :: buf_int_all(:),group(:),master_commout(:),procs(:),ranks(:)
706  integer, allocatable  :: ranks1(:,:),sizecomm(:)
707 
708 ! *************************************************************************
709 
710  nproc_in=xmpi_comm_size(mpicomm_in)
711  me_in=xmpi_comm_rank(mpicomm_in)
712  my_natom_in=size(my_atmtab_in)
713  nproc_out=xmpi_comm_size(mpicomm_out)
714  me_out=xmpi_comm_rank(mpicomm_out)
715  my_natom_out=size(my_atmtab_out)
716 
717  rank0(1)=0
718  call xmpi_comm_translate_ranks(mpicomm_out,1,rank0(1),mpicomm_in,ranks0_out_in(1))
719  me_out0_in=ranks0_out_in(1)
720 
721  ABI_ALLOCATE(ranks,(1:nproc_in))
722  ABI_ALLOCATE(sizecomm,(1:nproc_in))
723  ABI_ALLOCATE(master_commout,(1:nproc_in))
724  ABI_ALLOCATE(group,(0:nproc_in-1))
725  buf_int(1)=me_out; buf_int(2)=me_out0_in; buf_int(3)=nproc_out;
726 
727  ABI_ALLOCATE(buf_int_all,(3*nproc_in))
728  call xmpi_allgather(buf_int,3,buf_int_all,mpicomm_in,ierr)
729  nbgroup=0;
730  nproc_max=0
731  ranks(:)=-1;group(:)=-1;sizecomm(:)=-1;master_commout(:)=-1
732 
733  do ii=1,nproc_in
734    ranks(ii)=buf_int_all(3*ii-2)    !me_out
735    master_commout(ii)=buf_int_all(3*ii-1) !rank of me_out=0 of mpicomm_out expressed in mpicomm_in
736 
737    if (ranks(ii)==0) then
738      nbgroup=nbgroup+1
739      sizecomm(nbgroup)=buf_int_all(3*ii) !nproc_out
740      group(master_commout(ii))=nbgroup
741      if (sizecomm(nbgroup)>nproc_max) nproc_max=sizecomm(nbgroup)
742    end if
743 
744  enddo
745  ABI_DEALLOCATE(buf_int_all)
746 
747 
748  ABI_ALLOCATE(ranks1,(nbgroup,0:nproc_max-1))
749  ranks1(:,:)=-1
750  do ii=1,nproc_in
751    me_in_out=ranks(ii)
752    igroup=group(master_commout(ii))
753    ranks1(igroup,me_in_out)=ii-1 !numbering of procs
754  enddo
755 
756 !Send
757  nbsend=0
758  if (my_natom_in>0) then
759    ABI_ALLOCATE(SendAtomList,(my_natom_in*nbgroup))
760    ABI_ALLOCATE(SendAtomProc,(my_natom_in*nbgroup))
761    ABI_ALLOCATE(procs,(my_natom_in))
762    do igroup=1,nbgroup
763      call get_atm_proc(my_atmtab_in,natom,sizecomm(igroup),procs)
764      do i1=1,my_natom_in
765        nbsend=nbsend+1
766        SendAtomProc(nbsend)=ranks1(igroup,procs(i1))
767        SendAtomList(nbsend)=my_atmtab_in(i1)
768      end do
769    end do
770    ABI_DEALLOCATE(procs)
771  else
772    ABI_ALLOCATE(SendAtomList,(0))
773    ABI_ALLOCATE(SendAtomProc,(0))
774  end if
775 
776 !recv
777  if (my_natom_out>0) then !no early return above, because of the xmpi_allgather and the send operations
778    ABI_ALLOCATE(RecvAtomProc,(my_natom_out))
779    ABI_ALLOCATE(RecvAtomList,(my_natom_out))
780    RecvAtomList(:)=my_atmtab_out(:)
781 !The atoms are in increasing order (see get_my_atmtab), so the procs are sorted by increasing rank
782    call get_atm_proc(RecvAtomList,natom,nproc_in,RecvAtomProc)
783  else
784    ABI_ALLOCATE(RecvAtomList,(0))
785    ABI_ALLOCATE(RecvAtomProc,(0))
786  end if
787 
788  ABI_DEALLOCATE(master_commout)
789  ABI_DEALLOCATE(ranks1)
790  ABI_DEALLOCATE(group)
791  ABI_DEALLOCATE(ranks)
792  ABI_DEALLOCATE(sizecomm)
793 
794 end subroutine get_exchatom_list
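
 The comments above (and in get_exchatom_list1 below) rely on the atom tables being sorted in
 increasing order, so that the owning processes returned by get_atm_proc come out in
 non-decreasing rank order. A purely illustrative owner function with this property is sketched
 below (hypothetical even block distribution, for illustration only; the actual mapping is the
 one implemented in m_paral_atom/get_atm_proc):

 elemental integer function block_owner(iatom,natom,nproc) result(owner)
  !Hypothetical even block distribution of natom atoms over nproc processes
  !(assumes natom is a multiple of nproc); returns a 0-based process rank
   implicit none
   integer,intent(in) :: iatom,natom,nproc
   owner=(iatom-1)/(natom/nproc)
 end function block_owner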

m_paral_pert/get_exchatom_list1 [ Functions ]


NAME

 get_exchatom_list1

FUNCTION

 This routine determines the list of atoms to be exchanged between the current process and the other processes.
 It redistributes the atoms of one mpicomm_in among mpicomm_out.

INPUTS

  mpicomm_in=input MPI communicator (over atoms) among which atoms are initially distributed
  mpicomm_out=MPI communicator (over atoms) among which we want to distribute atoms
  my_atmtab_in= indexes of the atoms initially treated by the current proc (communicator mpicomm_in)
  my_atmtab_out= indexes of the atoms finally treated by the current proc (communicator mpicomm_out)
  natom = total number of atoms

OUTPUT

 For the current proc:
 RecvAtomProc(:)= ranks of the processes (in mpicomm_out) from which I expect atoms
 RecvAtomList(:)= indexes of the atoms to be received by me
   RecvAtomList(irecv) are the atoms I expect from RecvAtomProc(irecv)
 SendAtomProc(:)= ranks of the destination processes (in mpicomm_out) of the atoms I send
 SendAtomList(:)= indexes of the atoms to be sent by me
   SendAtomList(isend) are the atoms sent to SendAtomProc(isend)

NOTES

 Previously, mpicomm_out has been split into several mpicomm_in communicators.
 For our purpose, we only need to redistribute the atoms of one mpicomm_in among mpicomm_out,
 because the atomic data structures we have to exchange have the same value in each mpicomm_in.
 The mpicomm_in we choose is the one to which processor 0 of mpicomm_out belongs.
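
 xmpi_comm_translate_ranks is used below to express the ranks of one communicator in the
 numbering of another. A minimal plain-MPI sketch of the same kind of translation
 (illustration only, not the ABINIT wrapper itself) is:

 subroutine translate_all_ranks(comm_in,comm_out,ranks_in_out)
  !Express all ranks of comm_in in the numbering of comm_out
  !(MPI_UNDEFINED is returned for ranks that do not belong to comm_out)
   use mpi
   implicit none
   integer,intent(in) :: comm_in,comm_out
   integer,allocatable,intent(out) :: ranks_in_out(:)
   integer :: grp_in,grp_out,nproc_in,i1,ierr
   integer,allocatable :: ranks_in(:)

   call MPI_Comm_size(comm_in,nproc_in,ierr)
   allocate(ranks_in(nproc_in),ranks_in_out(nproc_in))
   ranks_in=(/ (i1,i1=0,nproc_in-1) /)
   call MPI_Comm_group(comm_in,grp_in,ierr)
   call MPI_Comm_group(comm_out,grp_out,ierr)
   call MPI_Group_translate_ranks(grp_in,nproc_in,ranks_in,grp_out,ranks_in_out,ierr)
   call MPI_Group_free(grp_in,ierr)
   call MPI_Group_free(grp_out,ierr)
   deallocate(ranks_in)
 end subroutine translate_all_ranks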

PARENTS

      m_paral_pert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

837 subroutine get_exchatom_list1(mpicomm_in,mpicomm_out,my_atmtab_in,my_atmtab_out,natom, &
838 &                             SendAtomProc,SendAtomList,RecvAtomProc,RecvAtomList)
839 
840 
841 !This section has been created automatically by the script Abilint (TD).
842 !Do not modify the following lines by hand.
843 #undef ABI_FUNC
844 #define ABI_FUNC 'get_exchatom_list1'
845 !End of the abilint section
846 
847  implicit none
848 
849 !Arguments ---------------------------------------------
850 !scalars
851  integer,intent(in) :: mpicomm_in,mpicomm_out,natom
852 !arrays
853  integer,intent(in) :: my_atmtab_in(:),my_atmtab_out(:)
854  integer,allocatable,intent(out) :: RecvAtomProc(:),RecvAtomList(:)
855  integer,allocatable,intent(out) :: SendAtomList(:),SendAtomProc(:)
856 
857 !Local variables ---------------------------------------
858 !scalars
859  integer :: i1,ier,me_out,my_natom_in,my_natom_out,nproc_in,nproc_out
860  logical :: sender
861 !arrays
862  integer,allocatable :: ranks_in(:),ranks_in_out(:)
863 
864 ! *************************************************************************
865 
866  me_out=xmpi_comm_rank(mpicomm_out)
867  my_natom_in=size(my_atmtab_in)
868  nproc_out=xmpi_comm_size(mpicomm_out)
869  my_natom_out=size(my_atmtab_out)
870  nproc_in=xmpi_comm_size(mpicomm_in)
871 
872  call xmpi_bcast(nproc_in,0,mpicomm_out,ier)
873  ABI_ALLOCATE(ranks_in_out,(0:nproc_in-1))
874 
875 !All atoms are distributed within each mpicomm_in;
876 !redistribute the atoms of one mpicomm_in among mpicomm_out
877 
878 !Look for the communicator mpicomm_in to which me_out=0 belongs
879 !Get ranks of all processors of mpicomm_in expressed in mpicomm_out
880  if (me_out==0) then
881    ABI_ALLOCATE(ranks_in,(0:nproc_in-1))
882    ranks_in=(/ (i1,i1=0,nproc_in-1 )/)
883    call xmpi_comm_translate_ranks(mpicomm_in,nproc_in,ranks_in,mpicomm_out,ranks_in_out)
884    ABI_DEALLOCATE(ranks_in)
885  end if
886  call xmpi_bcast(ranks_in_out,0,mpicomm_out,ier)
887 
888 !Check if me_out is one of the sending procs
889 !(i.e. belongs to the mpicomm_in to which processor 0 of mpicomm_out belongs)
890  sender = .false.
891  do i1=0,nproc_in-1
892    if (me_out==ranks_in_out(i1)) then
893      sender = .true.
894      exit
895    end if
896  end do
897 
898 !Send
899  if (my_natom_in>0.and.sender) then
900    ABI_ALLOCATE(SendAtomList,(my_natom_in))
901    ABI_ALLOCATE(SendAtomProc,(my_natom_in))
902    SendAtomList(:)=my_atmtab_in(:)
903 !  The atoms are in increasing order (see get_my_atmtab),
904 !  so the procs are sorted by increasing rank
905    call get_atm_proc(SendAtomList,natom,nproc_out,SendAtomProc)
906  else
907    ABI_ALLOCATE(SendAtomList,(0))
908    ABI_ALLOCATE(SendAtomProc,(0))
909  end if
910 
911 !Recv
912  if (my_natom_out>0) then
913    ABI_ALLOCATE(RecvAtomProc,(my_natom_out))
914    ABI_ALLOCATE(RecvAtomList,(my_natom_out))
915    RecvAtomList(:)=my_atmtab_out(:)
916 !  The atoms are in increasing order (see get_my_atmtab),
917 !  so the procs are sorted by increasing rank
918    call get_atm_proc(RecvAtomList,natom,nproc_in,RecvAtomProc)
919    RecvAtomProc(:)=ranks_in_out(RecvAtomProc(:))
920  else
921    ABI_ALLOCATE(RecvAtomList,(0))
922    ABI_ALLOCATE(RecvAtomProc,(0))
923  end if
924 
925  ABI_DEALLOCATE(ranks_in_out)
926 
927 end subroutine get_exchatom_list1

m_paral_pert/set_pert_comm [ Functions ]


NAME

 set_pert_comm

FUNCTION

 Set the MPI communicators over the perturbed cell

INPUTS

  nppert=number of processes for the parallelization over perturbations

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
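
 This routine only switches mpi_enreg to a perturbation-level cell communicator
 (comm_cell_pert) that has been built beforehand. Below is a minimal, self-contained sketch of
 such a layout (plain MPI, hypothetical nppert value; this is not the ABINIT code that actually
 builds comm_cell_pert), with nproc/nppert processes per perturbation group:

 program pert_comm_layout
   use mpi
   implicit none
   integer :: nppert,nproc,me,color,comm_cell_pert,nproc_cell,ierr

   call MPI_Init(ierr)
   call MPI_Comm_size(MPI_COMM_WORLD,nproc,ierr)
   call MPI_Comm_rank(MPI_COMM_WORLD,me,ierr)

  !Hypothetical number of perturbation groups; assumes nproc is a multiple of nppert
   nppert=2
   color=me/max(1,nproc/nppert)  !contiguous blocks of ranks, one block per perturbation group
   call MPI_Comm_split(MPI_COMM_WORLD,color,me,comm_cell_pert,ierr)
   call MPI_Comm_size(comm_cell_pert,nproc_cell,ierr)
  !nproc_cell equals nproc/nppert here, the value set_pert_comm stores in mpi_enreg%nproc_cell

   call MPI_Comm_free(comm_cell_pert,ierr)
   call MPI_Finalize(ierr)
 end program pert_comm_layout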

PARENTS

      dfpt_looppert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

 87 subroutine set_pert_comm(mpi_enreg,nppert)
 88 
 89 
 90 !This section has been created automatically by the script Abilint (TD).
 91 !Do not modify the following lines by hand.
 92 #undef ABI_FUNC
 93 #define ABI_FUNC 'set_pert_comm'
 94 !End of the abilint section
 95 
 96  implicit none
 97 
 98 !Arguments ---------------------------------------------
 99 !scalars
100  integer,intent(in) :: nppert
101  type(MPI_type), intent(inout) :: mpi_enreg
102 
103 ! *************************************************************************
104 
105  if (mpi_enreg%paral_pert==0) return
106 
107 !New communicators for the perturbed cell
108  mpi_enreg%comm_cell =mpi_enreg%comm_cell_pert
109  mpi_enreg%me_cell   =xmpi_comm_rank(mpi_enreg%comm_cell)
110  mpi_enreg%nproc_cell=mpi_enreg%nproc/nppert
111 
112 !Adjust other communicators
113  mpi_enreg%comm_kpt    =mpi_enreg%comm_cell
114  mpi_enreg%me_kpt      =mpi_enreg%me_cell
115  mpi_enreg%comm_kptband=mpi_enreg%comm_cell
116  mpi_enreg%comm_wvl    =mpi_enreg%comm_cell
117 
118 end subroutine set_pert_comm

m_paral_pert/set_pert_paw [ Functions ]


NAME

 set_pert_paw

FUNCTION

 Set PAW data for the parallelization over perturbations:
  Set MPI communicator over atomic sites
  Redistribute PAW on-site data

INPUTS

  dtset <type(dataset_type)>=all input variables for this dataset

OUTPUT

  paw_an_out(my_natom)<type(paw_an_type)>= --optional--
              redistributed PAW arrays given on angular mesh
              if not present, paw_an is redistributed "in-place"
  paw_ij_out(my_natom)<type(paw_ij_type)>= --optional--
              redistributed PAW arrays given on (i,j) channels
              if not present, paw_ij is redistributed "in-place"
  pawfgrtab_out(my_natom)<type(pawfgrtab_type)>= --optional--
              redistributed PAW atomic data given on fine grid
              if not present, pawfgrtab is redistributed "in-place"
  pawrhoij_out(my_natom)<type(pawrhoij_type)>= --optional--
              redistributed PAW rhoij occupancies
              if not present, pawrhoij is redistributed "in-place"
  old_atmtab= indexes of the atoms treated by the current proc before redistribution (saved for later restoration)
  old_comm_atom= identifier of the MPI communicator over atoms before redistribution (saved for later restoration)

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor
  paw_an(my_natom)<type(paw_an_type)>=PAW arrays given on angular mesh
  paw_ij(my_natom)<type(paw_ij_type)>=PAW arrays given on (i,j) channels
  pawfgrtab(my_natom)<type(pawfgrtab_type)>=PAW atomic data given on fine grid
  pawrhoij(my_natom)<type(pawrhoij_type)>=PAW rhoij occupancies
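
 A schematic view of how this routine is paired with its companions inside the perturbation
 loop (as in dfpt_looppert; optional *_out arguments omitted and surrounding code elided, so
 this is an abridged sketch rather than the actual caller):

 call set_pert_comm(mpi_enreg,nppert)                  !switch to the per-perturbation cell communicators
 call set_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                  paw_an,paw_ij,pawfgrtab,pawrhoij)   !redistribute PAW on-site data
!... treat the perturbations assigned to this group ...
 call unset_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
&                    paw_an,paw_ij,pawfgrtab,pawrhoij) !restore the original atom distribution
 call unset_pert_comm(mpi_enreg)                       !restore the global communicators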

PARENTS

      dfpt_looppert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

220 subroutine set_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
221 &                       paw_an,paw_ij,pawfgrtab,pawrhoij,&
222 &                       paw_an_out,paw_ij_out,pawfgrtab_out,pawrhoij_out)
223 
224 
225 !This section has been created automatically by the script Abilint (TD).
226 !Do not modify the following lines by hand.
227 #undef ABI_FUNC
228 #define ABI_FUNC 'set_pert_paw'
229 !End of the abilint section
230 
231  implicit none
232 
233 !Arguments ---------------------------------------------
234 !scalars
235  integer,intent(inout) :: my_natom
236  integer,intent(inout) :: old_comm_atom
237  type(dataset_type), intent(in) :: dtset
238  type(MPI_type), intent(inout) :: mpi_enreg
239 !arrays
240  integer,pointer,intent(out) :: old_atmtab(:)
241  type(paw_ij_type),allocatable,target,intent(inout) :: paw_ij(:)
242  type(paw_ij_type),optional,pointer,intent(inout) :: paw_ij_out(:)
243  type(paw_an_type),allocatable,target,intent(inout) :: paw_an(:)
244  type(paw_an_type),optional,pointer,intent(inout) :: paw_an_out(:)
245  type(pawfgrtab_type),allocatable,target,intent(inout) :: pawfgrtab(:)
246  type(pawfgrtab_type),optional,pointer,intent(inout) :: pawfgrtab_out(:)
247  type(pawrhoij_type),allocatable,target,intent(inout) :: pawrhoij(:)
248  type(pawrhoij_type),optional,pointer,intent(inout) :: pawrhoij_out(:)
249 
250 !Local variables ---------------------------------------
251 !scalars
252 !Type of algo used for communications: 1-brute force, 2-asynchronous
253  integer,parameter :: algo_option=2
254  logical :: paral_atom
255 !arrays
256  integer,allocatable :: SendAtomProc(:), SendAtomList(:)
257  integer,allocatable :: RecvAtomProc(:), RecvAtomList(:)
258  real(dp) :: tsec(2)
259 
260 ! *************************************************************************
261 
262  call timab(593,1,tsec)
263 
264  paral_atom=(dtset%natom/=my_natom)
265 
266 !Default value when parallelization over atomic sites is not activated
267  if ((mpi_enreg%paral_pert==0).or.(.not.paral_atom).or.(dtset%usepaw==0)) then
268    old_comm_atom=mpi_enreg%comm_atom
269    call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
270    if (present(pawrhoij_out))  pawrhoij_out=>pawrhoij
271    if (present(paw_ij_out))    paw_ij_out=>paw_ij
272    if (present(paw_an_out))    paw_an_out=>paw_an
273    if (present(pawfgrtab_out)) pawfgrtab_out=>pawfgrtab
274    return
275  end if
276 
277 !Redefine communicator for atoms and store old communicator
278 
279  old_comm_atom=mpi_enreg%comm_atom
280  call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
281  call initmpi_atom(dtset,mpi_enreg)
282  my_natom=mpi_enreg%my_natom
283 
284 !If "asynchronous algo", determine lists of atoms to be exchanged
285 !  between me and other processes
286  if (algo_option==2) then
287    call timab(594,1,tsec)
288    call get_exchatom_list(old_comm_atom,mpi_enreg%comm_atom,old_atmtab,&
289 &       mpi_enreg%my_atmtab,dtset%natom,SendAtomProc,&
290 &       SendAtomList,RecvAtomProc,RecvAtomList)
291    call timab(594,2,tsec)
292  end if
293 
294 !Redistribute PAW on-site data
295 
296 !pawrhoij datastructure
297  call timab(595,1,tsec)
298  if (present(pawrhoij_out)) then
299    nullify(pawrhoij_out)
300    if (algo_option==1) then
301      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
302 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
303 &         natom=dtset%natom,pawrhoij_out=pawrhoij_out)
304    else if (algo_option==2) then
305      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
306 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
307 &         natom=dtset%natom,pawrhoij_out=pawrhoij_out, &
308 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
309 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
310    end if
311  else
312    if (algo_option==1) then
313      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
314 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
315 &         natom=dtset%natom)
316    else if (algo_option==2) then
317      call pawrhoij_redistribute(pawrhoij,old_comm_atom,mpi_enreg%comm_atom,&
318 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
319 &         natom=dtset%natom,&
320 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
321 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
322    end if
323  end if
324  call timab(595,2,tsec)
325 
326 !paw_ij datastructure
327  call timab(596,1,tsec)
328  if (present(paw_ij_out)) then
329    nullify(paw_ij_out)
330    if (algo_option==1) then
331      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
332 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
333 &         natom=dtset%natom,paw_ij_out=paw_ij_out)
334    else if (algo_option==2) then
335      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
336 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
337 &         natom=dtset%natom,paw_ij_out=paw_ij_out,&
338 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
339 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
340    end if
341  else
342    if (algo_option==1) then
343      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
344 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
345 &         natom=dtset%natom)
346    else if (algo_option==2) then
347      call paw_ij_redistribute(paw_ij,old_comm_atom,mpi_enreg%comm_atom,&
348 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
349 &         natom=dtset%natom,&
350 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
351 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
352    end if
353  end if
354  call timab(596,2,tsec)
355 
356 !paw_an datastructure
357  call timab(597,1,tsec)
358  if (present(paw_an_out)) then
359    nullify(paw_an_out)
360    if (algo_option==1) then
361      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
362 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
363 &         natom=dtset%natom,paw_an_out=paw_an_out)
364    else if (algo_option==2) then
365      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
366 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
367 &         natom=dtset%natom,paw_an_out=paw_an_out,&
368 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
369 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
370    end if
371  else
372    if (algo_option==1) then
373      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
374 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
375 &         natom=dtset%natom)
376    else if (algo_option==2) then
377      call paw_an_redistribute(paw_an,old_comm_atom,mpi_enreg%comm_atom,&
378 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
379 &         natom=dtset%natom,&
380 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
381 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
382    end if
383  end if
384  call timab(597,2,tsec)
385 
386 !pawfgrtab datastructure
387  call timab(598,1,tsec)
388  if (present(pawfgrtab_out)) then
389    nullify(pawfgrtab_out)
390    if (algo_option==1) then
391      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
392 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
393 &         natom=dtset%natom,pawfgrtab_out=pawfgrtab_out)
394    else if (algo_option==2) then
395      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
396 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
397 &         natom=dtset%natom,pawfgrtab_out=pawfgrtab_out,&
398 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
399 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
400    end if
401  else
402    if (algo_option==1) then
403      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
404 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
405 &         natom=dtset%natom)
406    else if (algo_option==2) then
407      call pawfgrtab_redistribute(pawfgrtab,old_comm_atom,mpi_enreg%comm_atom,&
408 &         mpi_atmtab_in=old_atmtab,mpi_atmtab_out=mpi_enreg%my_atmtab,&
409 &         natom=dtset%natom,&
410 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
411 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
412    end if
413  end if
414  call timab(598,2,tsec)
415 
416  if (algo_option==2) then
417    ABI_DEALLOCATE(SendAtomProc)
418    ABI_DEALLOCATE(SendAtomList)
419    ABI_DEALLOCATE(RecvAtomProc)
420    ABI_DEALLOCATE(RecvAtomList)
421  end if
422 
423  call timab(593,2,tsec)
424 
425 end  subroutine set_pert_paw

m_paral_pert/unset_pert_comm [ Functions ]


NAME

 unset_pert_comm

FUNCTION

 Unset the MPI communicators over the perturbed cell; restore the global communicators.

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization

PARENTS

      dfpt_looppert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

141 subroutine unset_pert_comm(mpi_enreg)
142 
143 
144 !This section has been created automatically by the script Abilint (TD).
145 !Do not modify the following lines by hand.
146 #undef ABI_FUNC
147 #define ABI_FUNC 'unset_pert_comm'
148 !End of the abilint section
149 
150  implicit none
151 
152 !Arguments ---------------------------------------------
153 !scalars
154  type(MPI_type), intent(inout) :: mpi_enreg
155 
156 ! *************************************************************************
157 
158  if (mpi_enreg%paral_pert==0) return
159 
160 !New communicators for the cell
161  mpi_enreg%comm_cell =mpi_enreg%comm_world
162  mpi_enreg%nproc_cell=mpi_enreg%nproc
163  mpi_enreg%me_cell   =mpi_enreg%me
164 
165 !Adjust other communicators
166  mpi_enreg%comm_kpt    =mpi_enreg%comm_cell
167  mpi_enreg%me_kpt      =mpi_enreg%me_cell
168  mpi_enreg%comm_kptband=mpi_enreg%comm_cell
169  mpi_enreg%comm_wvl    =mpi_enreg%comm_cell
170 
171 end  subroutine unset_pert_comm

m_paral_pert/unset_pert_paw [ Functions ]


NAME

 unset_pert_paw

FUNCTION

 Unset PAW data after the parallelization over perturbations:
  Restore MPI communicator over atomic sites
  Restore PAW on-site data

INPUTS

  dtset <type(dataset_type)>=all input variables for this dataset
  old_atmtab=index of atoms to restore
  old_comm_atom=MPI communicator to restore
  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor

OUTPUT

  paw_an_out(my_natom)<type(paw_an_type)>= --optional--
              redistributed PAW arrays given on angular mesh
              if not present, paw_an is restored "in-place"
  paw_ij_out(my_natom)<type(paw_ij_type)>= --optional--
              redistributed PAW arrays given on (i,j) channels
              if not present, paw_ij is restored "in-place"
  pawfgrtab_out(my_natom)<type(pawfgrtab_type)>= --optional--
              redistributed PAW atomic data given on fine grid
              if not present, pawfgrtab is restored "in-place"
  pawrhoij_out(my_natom)<type(pawrhoij_type)>= --optional--
              redistributed PAW rhoij occupancies
              if not present, pawrhoij is restored "in-place"
 old_atmtab= saved indexes of the atoms treated by the current proc (updated)
 old_comm_atom= saved identifier of the MPI communicator over atoms (updated)

SIDE EFFECTS

  mpi_enreg=information about MPI parallelization
  my_natom=number of atoms treated by current processor
  paw_an(my_natom)<type(paw_an_type)>=PAW arrays given on angular mesh
  paw_ij(my_natom)<type(paw_ij_type)>=PAW arrays given on (i,j) channels
  pawfgrtab(my_natom)<type(pawfgrtab_type)>=PAW atomic data given on fine grid
  pawrhoij(my_natom)<type(pawrhoij_type)>=PAW rhoij occupancies

PARENTS

      dfpt_looppert

CHILDREN

      get_atm_proc,xmpi_bcast,xmpi_comm_translate_ranks

SOURCE

478 subroutine unset_pert_paw(dtset,mpi_enreg,my_natom,old_atmtab,old_comm_atom,&
479 &                       paw_an,paw_ij,pawfgrtab,pawrhoij,&
480 &                       paw_an_out,paw_ij_out,pawfgrtab_out,pawrhoij_out)
481 
482 
483 !This section has been created automatically by the script Abilint (TD).
484 !Do not modify the following lines by hand.
485 #undef ABI_FUNC
486 #define ABI_FUNC 'unset_pert_paw'
487 !End of the abilint section
488 
489  implicit none
490 
491 !Arguments ---------------------------------------------
492 !scalars
493  integer,intent(inout) :: my_natom
494  integer,intent(inout) :: old_comm_atom
495  type(dataset_type), intent(in) :: dtset
496  type(MPI_type), intent(inout) :: mpi_enreg
497 !arrays
498  integer,pointer,intent(inout) :: old_atmtab(:)
499  type(paw_ij_type),allocatable,target,intent(inout) :: paw_ij(:)
500  type(paw_ij_type),optional,pointer,intent(inout) :: paw_ij_out(:)
501  type(paw_an_type),allocatable,target,intent(inout) :: paw_an(:)
502  type(paw_an_type),optional,pointer,intent(inout) :: paw_an_out(:)
503  type(pawfgrtab_type),allocatable,target,intent(inout) :: pawfgrtab(:)
504  type(pawfgrtab_type),optional,pointer,intent(inout) :: pawfgrtab_out(:)
505  type(pawrhoij_type),allocatable,target,intent(inout) :: pawrhoij(:)
506  type(pawrhoij_type),optional,pointer,intent(inout) :: pawrhoij_out(:)
507 
508 !Local variables ---------------------------------------
509 !scalars
510 !Type of algo used for communications: 1-brute force, 2-asynchronous
511  integer,parameter :: algo_option=2
512  integer :: my_natom_old
513  logical :: exchange,paral_atom
514 !arrays
515  integer,allocatable :: SendAtomProc(:), SendAtomList(:)
516  integer,allocatable :: RecvAtomProc(:), RecvAtomList(:)
517  real(dp) :: tsec(2)
518 
519 ! *************************************************************************
520 
521 !Nothing to do when parallelization over atomic sites is not activated
522  if ((mpi_enreg%paral_pert==0).or.(dtset%usepaw==0)) return
523  if (.not.associated(old_atmtab)) return
524  paral_atom=(dtset%natom/=size(old_atmtab))
525  if (.not.paral_atom) return
526 
527 !If "asynchronous algo", determine lists of atoms to be exchanged
528 !  between me and other processes
529  exchange=.true.
530  if (present(pawrhoij_out).and.present(paw_ij_out).and. &
531 &    present(paw_an_out)  .and.present(pawfgrtab_out)) exchange=.false.
532  if (algo_option==2.and.exchange) then
533    call timab(594,1,tsec)
534    my_natom_old=size(old_atmtab)
535    call get_exchatom_list1(mpi_enreg%comm_atom,old_comm_atom,mpi_enreg%my_atmtab,&
536 &       old_atmtab,dtset%natom,SendAtomProc,&
537 &       SendAtomList,RecvAtomProc,RecvAtomList)
538    call timab(594,2,tsec)
539  end if
540 
541 !Redistribute PAW on-site data (if in-place storage)
542 !or destroy them (out-of-place storage)
543 
544 !pawrhoij datastructure
545  call timab(595,1,tsec)
546  if (present(pawrhoij_out)) then
547    call pawrhoij_free(pawrhoij_out)
548    ABI_DATATYPE_DEALLOCATE(pawrhoij_out)
549  else
550    if (algo_option==1) then
551      call pawrhoij_redistribute(pawrhoij,mpi_enreg%comm_atom,old_comm_atom,&
552 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
553 &         natom=dtset%natom)
554    else if (algo_option==2) then
555      call pawrhoij_redistribute(pawrhoij,mpi_enreg%comm_atom,old_comm_atom,&
556 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
557 &         natom=dtset%natom,&
558 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
559 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
560    end if
561  end if
562  call timab(595,2,tsec)
563 
564 !paw_ij datastructure
565  call timab(596,1,tsec)
566  if (present(paw_ij_out)) then
567    call paw_ij_free(paw_ij_out)
568    ABI_DATATYPE_DEALLOCATE(paw_ij_out)
569  else
570    if (algo_option==1) then
571      call paw_ij_redistribute(paw_ij,mpi_enreg%comm_atom,old_comm_atom,&
572 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
573 &         natom=dtset%natom)
574    else if (algo_option==2) then
575      call paw_ij_redistribute(paw_ij,mpi_enreg%comm_atom,old_comm_atom,&
576 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
577 &         natom=dtset%natom,&
578 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
579 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
580    end if
581  end if
582  call timab(596,2,tsec)
583 
584 !paw_an datastructure
585  call timab(597,1,tsec)
586  if (present(paw_an_out)) then
587    call paw_an_free(paw_an_out)
588    ABI_DATATYPE_DEALLOCATE(paw_an_out)
589  else
590    if (algo_option==1) then
591      call paw_an_redistribute(paw_an,mpi_enreg%comm_atom,old_comm_atom,&
592 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
593 &         natom=dtset%natom)
594    else if (algo_option==2) then
595      call paw_an_redistribute(paw_an,mpi_enreg%comm_atom,old_comm_atom,&
596 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
597 &         natom=dtset%natom,&
598 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
599 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
600    end if
601  end if
602  call timab(597,2,tsec)
603 
604 !pawfgrtab datastructure
605  call timab(598,1,tsec)
606  if (present(pawfgrtab_out)) then
607    call pawfgrtab_free(pawfgrtab_out)
608    ABI_DATATYPE_DEALLOCATE(pawfgrtab_out)
609  else
610    if (algo_option==1) then
611      call pawfgrtab_redistribute(pawfgrtab,mpi_enreg%comm_atom,old_comm_atom,&
612 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
613 &         natom=dtset%natom)
614    else if (algo_option==2) then
615      call pawfgrtab_redistribute(pawfgrtab,mpi_enreg%comm_atom,old_comm_atom,&
616 &         mpi_atmtab_in=mpi_enreg%my_atmtab,mpi_atmtab_out=old_atmtab,&
617 &         natom=dtset%natom,&
618 &         SendAtomproc=SendAtomProc,SendAtomList=SendAtomList,&
619 &         RecvAtomProc=RecvAtomProc,RecvAtomList=RecvAtomList)
620    end if
621  end if
622  call timab(598,2,tsec)
623 
624 !Release some memory
625  if (algo_option==2.and.exchange) then
626    ABI_DEALLOCATE(SendAtomProc)
627    ABI_DEALLOCATE(SendAtomList)
628    ABI_DEALLOCATE(RecvAtomProc)
629    ABI_DEALLOCATE(RecvAtomList)
630  end if
631 
632 !Restore communicator for atoms
633  ABI_DEALLOCATE(old_atmtab)
634  old_comm_atom=mpi_enreg%comm_atom
635  call deep_copy(mpi_enreg%my_atmtab,old_atmtab)
636  call initmpi_atom(dtset,mpi_enreg)
637  my_natom=mpi_enreg%my_natom
638 
639 end  subroutine unset_pert_paw