TABLE OF CONTENTS
ABINIT/initmpi_grid [ Functions ]
NAME
initmpi_grid
FUNCTION
Initializes the MPI information for the grid: * 2D if parallelization KPT/FFT (!paral_kgb & MPI) * 3D if parallelization KPT/FFT/BAND (paral_kgb & MPI) * 2D in case of a Hartree-Fock calculation
COPYRIGHT
Copyright (C) 2005-2018 ABINIT group This file is distributed under the terms of the GNU General Public License, see ~abinit/COPYING or http://www.gnu.org/copyleft/gpl.txt. For the initials of contributors, see ~abinit/doc/developers/contributors.txt.
INPUTS
OUTPUT
SIDE EFFECTS
mpi_enreg=information about MPI parallelization
PARENTS
mpi_setup
CHILDREN
mpi_cart_coords,mpi_cart_create,mpi_cart_sub,mpi_comm_rank,wrtout,xmpi_abort,xmpi_comm_free
SOURCE
#if defined HAVE_CONFIG_H
#include "config.h"
#endif

#include "abi_common.h"


!> Initialize the MPI communicators describing the process grid:
!! a 4D cartesian grid (kpt x band x spinor x fft) when paral_kgb/=0,
!! a 2D cartesian grid (kpt x hf) when paral_hf==1 (Hartree-Fock),
!! and trivial (self/cell) communicators otherwise.
subroutine initmpi_grid(mpi_enreg)

 use defs_basis
 use defs_abitypes
 use m_errors
 use m_profiling_abi
 use m_xmpi

#if defined HAVE_MPI2 && !defined FC_G95
 use mpi
#endif

!This section has been created automatically by the script Abilint (TD).
!Do not modify the following lines by hand.
#undef ABI_FUNC
#define ABI_FUNC 'initmpi_grid'
 use interfaces_14_hidewrite
!End of the abilint section

 implicit none

#if defined HAVE_MPI1 || (defined HAVE_MPI && defined FC_G95)
 include 'mpif.h'
#endif

!Arguments ------------------------------------
!mpi_enreg = MPI parallelization data; the me_* ranks and comm_* communicators
!            for the fft/band/kpt/spinor/hf distributions are (re)defined here.
 type(MPI_type),intent(inout) :: mpi_enreg
!Local variables-------------------------------
!scalars
 integer :: nproc,nproc_eff,spacecomm
 character(len=500) :: msg
#if defined HAVE_MPI
 integer :: commcart_4d,dimcart,ierr,me_cart_4d
 integer :: commcart_2d,me_cart_2d
 logical :: reorder
#endif
!arrays
#if defined HAVE_MPI
 integer,allocatable :: coords(:),sizecart(:)
 logical,allocatable :: periode(:), keepdim(:)
#endif

! *********************************************************************

 DBG_ENTER("COLL")

!Select the correct "world" communicator
 nproc=mpi_enreg%nproc_cell
!NOTE(review): the next statement is a no-op as written (nproc already holds
!nproc_cell); it probably was meant to select a perturbation-specific count.
!Kept unchanged to preserve behavior -- confirm intent with the authors.
 if(mpi_enreg%paral_pert==1) nproc=mpi_enreg%nproc_cell
 spacecomm=mpi_enreg%comm_cell

!Fake values for null communicator
 if (nproc==0) then
   mpi_enreg%nproc_fft    = 0
   mpi_enreg%nproc_band   = 0
   mpi_enreg%nproc_hf     = 0
   mpi_enreg%nproc_kpt    = 0
   mpi_enreg%nproc_spinor = 0
   mpi_enreg%comm_fft           = xmpi_comm_null
   mpi_enreg%comm_band          = xmpi_comm_null
   mpi_enreg%comm_hf            = xmpi_comm_null
   mpi_enreg%comm_kpt           = xmpi_comm_null
   mpi_enreg%comm_kptband       = xmpi_comm_null
   mpi_enreg%comm_spinor        = xmpi_comm_null
   mpi_enreg%comm_bandspinor    = xmpi_comm_null
   mpi_enreg%comm_spinorfft     = xmpi_comm_null
   mpi_enreg%comm_bandfft       = xmpi_comm_null
   mpi_enreg%comm_bandspinorfft = xmpi_comm_null
   mpi_enreg%bandpp = 1
   return
 end if

#if defined HAVE_MPI

 if (mpi_enreg%paral_hf==0) then
!  Either the option Fock exchange is not active or there is no
!  parallelization on the Fock exchange calculation.

   if (mpi_enreg%nproc_spinor>1) mpi_enreg%paral_spinor=1

!  Effective number of processors used for the grid
   nproc_eff=mpi_enreg%nproc_fft*mpi_enreg%nproc_band &
&            *mpi_enreg%nproc_kpt*mpi_enreg%nproc_spinor
   if(nproc_eff/=nproc) then
     write(msg,'(4a,5(a,i0))') &
&     ' The number of band*FFT*kpt*spinor processors, npband*npfft*npkpt*npspinor should be',ch10,&
&     ' equal to the total number of processors, nproc.',ch10,&
&     ' However, npband =',mpi_enreg%nproc_band,&
&     ' npfft =',mpi_enreg%nproc_fft,&
&     ' npkpt =',mpi_enreg%nproc_kpt,&
&     ' npspinor =',mpi_enreg%nproc_spinor,&
&     ' and nproc =',nproc
     MSG_WARNING(msg)
   end if

!  Nothing to do if only 1 proc
   if (nproc_eff==1) return

!  Initialize the communicator for Hartree-Fock to xmpi_comm_self
   mpi_enreg%me_hf =0
   mpi_enreg%comm_hf=xmpi_comm_self

   if(mpi_enreg%paral_kgb==0) then
!    No band/FFT/spinor parallelism: only k-points are distributed over comm_cell.
     mpi_enreg%me_fft =0
     mpi_enreg%me_band=0
     mpi_enreg%me_kpt =mpi_enreg%me_cell
     mpi_enreg%me_spinor=0
     mpi_enreg%comm_fft=xmpi_comm_self
     mpi_enreg%comm_band=xmpi_comm_self
     mpi_enreg%comm_kpt=mpi_enreg%comm_cell
     mpi_enreg%comm_spinor=xmpi_comm_self
     mpi_enreg%comm_bandspinor=xmpi_comm_self
     mpi_enreg%comm_kptband=mpi_enreg%comm_cell
     mpi_enreg%comm_spinorfft=xmpi_comm_self
     mpi_enreg%comm_bandfft=xmpi_comm_self
     mpi_enreg%comm_bandspinorfft=xmpi_comm_self
   else
!    CREATE THE 4D GRID
!    ==================================================

!    Create the global cartesian 4D- communicator
!    valgrind claims this is not deallocated in test v5/72
!    Can someone knowledgable check?
     dimcart=4
     ABI_ALLOCATE(sizecart,(dimcart))
     ABI_ALLOCATE(periode,(dimcart))
!    MT 2012-june: change the order of the indexes; not sure this is efficient
!    (not efficient on TGCC-Curie).
!    Grid axes: 1=kpt, 2=band, 3=spinor, 4=fft
     sizecart(1)=mpi_enreg%nproc_kpt    ! mpi_enreg%nproc_kpt
     sizecart(2)=mpi_enreg%nproc_band   ! mpi_enreg%nproc_band
     sizecart(3)=mpi_enreg%nproc_spinor ! mpi_enreg%nproc_spinor
     sizecart(4)=mpi_enreg%nproc_fft    ! mpi_enreg%nproc_fft
     periode(:)=.false.;reorder=.false.
     call MPI_CART_CREATE(spacecomm,dimcart,sizecart,periode,reorder,commcart_4d,ierr)
     ABI_DEALLOCATE(periode)
     ABI_DEALLOCATE(sizecart)

!    Find the index and coordinates of the current processor
     call MPI_COMM_RANK(commcart_4d, me_cart_4d, ierr)
     ABI_ALLOCATE(coords,(dimcart))
     call MPI_CART_COORDS(commcart_4d, me_cart_4d,dimcart,coords,ierr)
     mpi_enreg%me_kpt =coords(1)
     mpi_enreg%me_band=coords(2)
     mpi_enreg%me_spinor=coords(3)
     mpi_enreg%me_fft =coords(4)
     ABI_DEALLOCATE(coords)

!    Each sub-communicator below keeps a subset of the 4 grid dimensions.
     ABI_ALLOCATE(keepdim,(dimcart))

!    Create the communicator for fft distribution
     keepdim(1)=.false.
     keepdim(2)=.false.
     keepdim(3)=.false.
     keepdim(4)=.true.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_fft,ierr)

!    Create the communicator for band distribution
     keepdim(1)=.false.
     keepdim(2)=.true.
     keepdim(3)=.false.
     keepdim(4)=.false.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_band,ierr)

!    Create the communicator for kpt distribution
     keepdim(1)=.true.
     keepdim(2)=.false.
     keepdim(3)=.false.
     keepdim(4)=.false.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_kpt,ierr)

!    Create the communicator for spinor distribution
     keepdim(1)=.false.
     keepdim(2)=.false.
     keepdim(3)=.true.
     keepdim(4)=.false.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_spinor,ierr)

!    Create the communicator for band-spinor distribution
     keepdim(1)=.false.
     keepdim(2)=.true.
     keepdim(3)=.true.
     keepdim(4)=.false.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_bandspinor,ierr)
!    NOTE(review): only this MPI_CART_SUB checks ierr; the siblings do not.
     if (ierr /= MPI_SUCCESS ) then
       call xmpi_abort(mpi_enreg%comm_world,ierr)
     end if

!    Create the communicator for kpt-band distribution
     keepdim(1)=.true.
     keepdim(2)=.true.
     keepdim(3)=.false.
     keepdim(4)=.false.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_kptband,ierr)

!    Create the communicator for fft-spinor distribution
     keepdim(1)=.false.
     keepdim(2)=.false.
     keepdim(3)=.true.
     keepdim(4)=.true.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_spinorfft,ierr)

!    Create the communicator for fft-band distribution
     keepdim(1)=.false.
     keepdim(2)=.true.
     keepdim(3)=.false.
     keepdim(4)=.true.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_bandfft,ierr)

!    Create the communicator for fft-band-spinor distribution
     keepdim(1)=.false.
     keepdim(2)=.true.
     keepdim(3)=.true.
     keepdim(4)=.true.
     call MPI_CART_SUB(commcart_4d, keepdim, mpi_enreg%comm_bandspinorfft,ierr)

     ABI_DEALLOCATE(keepdim)
     call xmpi_comm_free(commcart_4d)
   end if

!  Write some data
   write(msg,'(a,4i5)') 'npfft, npband, npspinor and npkpt: ',&
&   mpi_enreg%nproc_fft,mpi_enreg%nproc_band, &
&   mpi_enreg%nproc_spinor,mpi_enreg%nproc_kpt
   call wrtout(std_out,msg,'COLL')
   write(msg,'(a,4i5)') 'me_fft, me_band, me_spinor , me_kpt: ',&
&   mpi_enreg%me_fft,mpi_enreg%me_band,&
&   mpi_enreg%me_spinor, mpi_enreg%me_kpt
!  Bug fix: this message was previously built but never written out.
   call wrtout(std_out,msg,'COLL')

 else ! paral_hf==1
!  Option Hartree-Fock is active and more than 1 processor is dedicated
!  to the parallelization over occupied states.

!  Initialize the values of fft, band and spinor communicators, as in the case paral_kgb==0.
   mpi_enreg%me_fft =0
   mpi_enreg%me_band=0
   mpi_enreg%me_spinor=0
   mpi_enreg%comm_fft=xmpi_comm_self
   mpi_enreg%comm_band=xmpi_comm_self
   mpi_enreg%comm_spinor=xmpi_comm_self
   mpi_enreg%comm_bandspinor=xmpi_comm_self
   mpi_enreg%comm_kptband=mpi_enreg%comm_cell
   mpi_enreg%comm_spinorfft=xmpi_comm_self
   mpi_enreg%comm_bandfft=xmpi_comm_self
   mpi_enreg%comm_bandspinorfft=xmpi_comm_self

!  Create the global cartesian 2D- communicator (axes: 1=kpt, 2=hf)
   dimcart=2
   ABI_ALLOCATE(sizecart,(dimcart))
   ABI_ALLOCATE(periode,(dimcart))
   sizecart(1)=mpi_enreg%nproc_kpt ! mpi_enreg%nproc_kpt
   sizecart(2)=mpi_enreg%nproc_hf  ! mpi_enreg%nproc_hf
   periode(:)=.false.;reorder=.false.
   call MPI_CART_CREATE(spacecomm,dimcart,sizecart,periode,reorder,commcart_2d,ierr)
   ABI_DEALLOCATE(periode)
   ABI_DEALLOCATE(sizecart)

!  Find the index and coordinates of the current processor
   call MPI_COMM_RANK(commcart_2d, me_cart_2d, ierr)
   ABI_ALLOCATE(coords,(dimcart))
   call MPI_CART_COORDS(commcart_2d, me_cart_2d,dimcart,coords,ierr)
   mpi_enreg%me_kpt =coords(1)
   mpi_enreg%me_hf=coords(2)
   ABI_DEALLOCATE(coords)

   ABI_ALLOCATE(keepdim,(dimcart))

!  Create the communicator for kpt distribution
   keepdim(1)=.true.
   keepdim(2)=.false.
   call MPI_CART_SUB(commcart_2d, keepdim, mpi_enreg%comm_kpt,ierr)

!  Create the communicator for hf distribution
   keepdim(1)=.false.
   keepdim(2)=.true.
   call MPI_CART_SUB(commcart_2d, keepdim, mpi_enreg%comm_hf,ierr)

   ABI_DEALLOCATE(keepdim)
   call xmpi_comm_free(commcart_2d)

!  Write some data
   write(msg,'(a,2(1x,i0))') 'nphf and npkpt: ',mpi_enreg%nproc_hf, mpi_enreg%nproc_kpt
   call wrtout(std_out,msg,'COLL')
   write(msg,'(a,2(1x,i0))') 'me_hf, me_kpt: ',mpi_enreg%me_hf, mpi_enreg%me_kpt
!  Bug fix: this message was previously built but never written out.
   call wrtout(std_out,msg,'COLL')
 end if

#endif

 DBG_EXIT("COLL")

end subroutine initmpi_grid