Variables for task groups (actually a single one, ntask_groups) moved to where they belong

(I think): mp_bands.f90. Many changes, but nothing dangerous. Note that
codes not in svn may be broken by this change, but the fix is very simple (see the sketch below).


git-svn-id: http://qeforge.qe-forge.org/svn/q-e/trunk/espresso@10567 c92efa57-630b-4861-b058-cf58834340f0
giannozz 2013-11-03 19:16:37 +00:00
parent 6becfe1a31
commit 3fcc01b467
24 changed files with 123 additions and 148 deletions
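
For codes outside svn, the fix is a one-line change of the USE statement: the function
get_ntask_groups(), removed from mp_global, is replaced by the public variable
ntask_groups of mp_bands. A minimal sketch (the routine and the local variable ntg are
hypothetical, for illustration only):

    SUBROUTINE my_external_routine( )
       ! before this commit:
       !   USE mp_global, ONLY : get_ntask_groups
       !   ntg = get_ntask_groups()
       ! after this commit:
       USE mp_bands, ONLY : ntask_groups
       IMPLICIT NONE
       INTEGER :: ntg
       ntg = ntask_groups
    END SUBROUTINE my_external_routine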

View File

@ -53,14 +53,15 @@ MODULE cp_restart
USE control_flags, ONLY : tksw, lwfpbe0nscf, lwfnscf ! Lingzhu Kong
USE io_files, ONLY : psfile, pseudo_dir, iunwfc, &
nwordwfc, tmp_dir, diropn
USE mp_global, ONLY : intra_image_comm, me_image, &
nproc_image, &
nproc_pool, intra_pool_comm, &
me_bgrp, nproc_bgrp, &
USE mp_images, ONLY : intra_image_comm, me_image, &
nproc_image
USE mp_pools, ONLY : nproc_pool, intra_pool_comm
USE mp_bands, ONLY : me_bgrp, nproc_bgrp, &
my_bgrp_id, intra_bgrp_comm, &
inter_bgrp_comm, root_bgrp
USE mp_global, ONLY : nproc_pot, nproc_bgrp, nproc_ortho, &
get_ntask_groups
inter_bgrp_comm, root_bgrp, &
ntask_groups
USE mp_pots, ONLY : nproc_pot
USE mp_diag, ONLY : nproc_ortho
USE mp_world, ONLY : world_comm, nproc
USE run_info, ONLY : title
USE gvect, ONLY : ngm, ngm_g
@ -158,7 +159,6 @@ MODULE cp_restart
CHARACTER(LEN=256) :: wfc_dir
LOGICAL :: exst
INTEGER :: inlc
INTEGER :: ntask_groups
!
! ... subroutine body
!
@ -400,7 +400,6 @@ MODULE cp_restart
! ... PARALLELISM
!-------------------------------------------------------------------------------
!
ntask_groups=get_ntask_groups()
CALL write_para( kunit, nproc, nproc_pool, nproc_image, ntask_groups, &
nproc_pot, nproc_bgrp, nproc_ortho )
!

View File

@ -42,8 +42,8 @@
USE electrons_module, ONLY: bmeshset
USE electrons_base, ONLY: distribute_bands
USE problem_size, ONLY: cpsizes
USE mp_global, ONLY: me_bgrp, root_bgrp, nproc_bgrp, nbgrp, my_bgrp_id, intra_bgrp_comm
USE mp_global, ONLY: get_ntask_groups
USE mp_bands, ONLY: me_bgrp, root_bgrp, nproc_bgrp, nbgrp, &
my_bgrp_id, intra_bgrp_comm, ntask_groups
USE uspp, ONLY: okvan, nlcc_any
implicit none
@ -51,7 +51,7 @@
integer :: i
real(dp) :: rat1, rat2, rat3
real(dp) :: bg(3,3), tpiba2
integer :: ng_, ngs_, ngm_ , ngw_ , nogrp_
integer :: ng_, ngs_, ngm_ , ngw_
CALL start_clock( 'init_dim' )
@ -109,10 +109,10 @@
! ... set the sticks mesh and distribute g vectors among processors
! ... pstickset also sets the local real-space grid dimensions
!
nogrp_ = get_ntask_groups()
CALL pstickset( gamma_only, bg, gcutm, gkcut, gcutms, &
dfftp, dffts, ngw_ , ngm_ , ngs_ , me_bgrp, root_bgrp, nproc_bgrp, intra_bgrp_comm, nogrp_ )
dfftp, dffts, ngw_ , ngm_ , ngs_ , me_bgrp, root_bgrp, &
nproc_bgrp, intra_bgrp_comm, ntask_groups )
!
!
! ... Initialize reciprocal space local and global dimensions

View File

@ -34,10 +34,9 @@ USE ions_base, ONLY: na !number of atoms within each at
USE ions_base, ONLY: ityp !ityp(i):=type/species of ith atom
USE ions_base, ONLY: atm !atm(j):=name of jth atomic species (3 characters)
USE kinds, ONLY: DP !double-precision kind (selected_real_kind(14,200))
USE mp_global, ONLY: nproc_image !number of processors
USE mp_global, ONLY: me_image !processor number (0,1,...,nproc_image-1)
USE mp_global, ONLY: intra_image_comm !standard MPI communicator
USE mp_global, ONLY: get_ntask_groups !retrieve number of task groups
USE mp_images, ONLY: nproc_image !number of processors
USE mp_images, ONLY: me_image !processor number (0,1,...,nproc_image-1)
USE mp_images, ONLY: intra_image_comm !standard MPI communicator
USE mp_world, ONLY: world_comm !world communicator, not the same as MPI_COMM_WORLD!
USE mp, ONLY: mp_sum !MPI collection with sum
USE parallel_include !MPI header

View File

@ -54,11 +54,11 @@ SUBROUTINE phq_readin()
USE ldaU, ONLY : lda_plus_u
USE control_flags, ONLY : iverbosity, modenum, twfcollect,io_level
USE io_global, ONLY : ionode, stdout
USE mp_global, ONLY : nproc_pool, nproc_pool_file, &
nimage, my_image_id, &
nproc_image_file, nproc_image, npool, &
get_ntask_groups, ntask_groups_file, &
nproc_bgrp_file
USE mp_global, ONLY : nproc_pool_file, nproc_image_file, &
ntask_groups_file, nproc_bgrp_file
USE mp_images, ONLY : nimage, my_image_id, nproc_image
USE mp_pools, ONLY : nproc_pool, npool
USE mp_bands, ONLY : ntask_groups
USE paw_variables, ONLY : okpaw
USE ramanm, ONLY : eth_rps, eth_ns, lraman, elop, dek
USE freq_ph, ONLY : fpol, fiu, nfs
@ -499,7 +499,7 @@ SUBROUTINE phq_readin()
CALL errore('phq_readin',&
'pw.x run with a different number of pools. Use wf_collect=.true.',1)
IF (get_ntask_groups() > 1) &
IF (ntask_groups > 1) &
CALL errore('phq_readin','task_groups not available in phonon',1)
IF (nproc_bgrp_file /= nproc_pool_file) &

View File

@ -13,12 +13,11 @@ MODULE environment
USE kinds, ONLY: DP
USE io_files, ONLY: crash_file, nd_nmbr
USE io_global, ONLY: stdout, meta_ionode
USE mp_global, ONLY: get_ntask_groups
USE mp_world, ONLY: nproc
USE mp_images, ONLY: me_image, my_image_id, root_image, nimage, &
nproc_image
USE mp_pools, ONLY: npool
USE mp_bands, ONLY: nproc_bgrp, nbgrp
USE mp_bands, ONLY: ntask_groups, nproc_bgrp, nbgrp
USE global_version, ONLY: version_number, svn_revision
IMPLICIT NONE
@ -205,9 +204,9 @@ CONTAINS
'(5X,"band groups division: nbgrp = ",I7)' ) nbgrp
IF ( nproc_bgrp > 1 ) WRITE( stdout, &
'(5X,"R & G space division: proc/nbgrp/npool/nimage = ",I7)' ) nproc_bgrp
IF ( get_ntask_groups() > 1 ) WRITE( stdout, &
IF ( ntask_groups > 1 ) WRITE( stdout, &
'(5X,"wavefunctions fft division: fft and procs/group = ",2I7)' ) &
get_ntask_groups(), nproc_bgrp / get_ntask_groups()
ntask_groups, nproc_bgrp / ntask_groups
!
END SUBROUTINE parallel_info

View File

@ -26,6 +26,10 @@ MODULE mp_bands
INTEGER :: inter_bgrp_comm = 0 ! inter band group communicator
INTEGER :: intra_bgrp_comm = 0 ! intra band group communicator
!
! ... "task" groups (for band parallelization of FFT)
!
INTEGER :: ntask_groups = 1 ! number of proc. in an orbital "task group"
!
! ... The following variables not set during initialization but later
!
INTEGER :: ibnd_start = 0 ! starting band index
@ -34,7 +38,7 @@ MODULE mp_bands
CONTAINS
!
!----------------------------------------------------------------------------
SUBROUTINE mp_start_bands( nband_, parent_comm )
SUBROUTINE mp_start_bands( nband_, ntg_, parent_comm )
!---------------------------------------------------------------------------
!
! ... Divide processors (of the "parent_comm" group) into bands pools
@ -45,6 +49,7 @@ CONTAINS
IMPLICIT NONE
!
INTEGER, INTENT(IN) :: nband_, parent_comm
INTEGER, INTENT(IN), OPTIONAL :: ntg_
!
INTEGER :: parent_nproc = 1, parent_mype = 0, ierr = 0
!
@ -93,6 +98,10 @@ CONTAINS
IF ( ierr /= 0 ) CALL errore( 'init_bands', &
'inter band group communicator initialization', ABS(ierr) )
!
IF ( PRESENT(ntg_) ) THEN
ntask_groups = ntg_
END IF
!
#endif
RETURN
!
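
Since ntg_ is an OPTIONAL dummy argument placed before parent_comm, a caller that does not
set task groups has to pass parent_comm by keyword; a rough sketch of the two call forms
(hypothetical caller, argument names as in the module):

    ! explicit number of task groups: also sets ntask_groups inside mp_bands
    CALL mp_start_bands( nband_, ntg_, intra_pool_comm )
    ! no task groups requested: ntask_groups keeps its default value of 1
    CALL mp_start_bands( nband_, parent_comm = intra_pool_comm )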

View File

@ -10,8 +10,8 @@ MODULE mp_global
!----------------------------------------------------------------------------
!
! ... Wrapper module, for compatibility. Contains a few "leftover" variables
! ... used for checks (all the *_file variables, read from data file) and for
! ... "task groups", plus the routine mp_startup initializing MPI, plus the
! ... used for checks (all the *_file variables, read from data file),
! ... plus the routine mp_startup initializing MPI, plus the
! ... routine mp_global_end stopping MPI.
! ... Do not use this module to reference variables (e.g. communicators)
! ... belonging to each of the various parallelization levels:
@ -37,11 +37,6 @@ MODULE mp_global
INTEGER :: nproc_bgrp_file = 1
INTEGER :: ntask_groups_file= 1
!
! ... "task" groups (for band parallelization of FFT)
!
INTEGER :: ntask_groups = 1 ! number of proc. in an orbital "task group"
PRIVATE :: ntask_groups
!
CONTAINS
!
!-----------------------------------------------------------------------
@ -80,8 +75,7 @@ CONTAINS
!
CALL mp_start_pots ( npot_, intra_image_comm )
CALL mp_start_pools ( npool_, intra_image_comm )
CALL mp_start_bands ( nband_, intra_pool_comm )
ntask_groups = ntg_
CALL mp_start_bands ( nband_, ntg_, intra_pool_comm )
CALL mp_start_diag ( ndiag_, intra_bgrp_comm )
!
RETURN
@ -96,13 +90,4 @@ CONTAINS
!
END SUBROUTINE mp_global_end
!
!-----------------------------------------------------------------------
FUNCTION get_ntask_groups()
!-----------------------------------------------------------------------
IMPLICIT NONE
INTEGER :: get_ntask_groups
get_ntask_groups = ntask_groups
RETURN
END FUNCTION get_ntask_groups
!
END MODULE mp_global

View File

@ -23,7 +23,7 @@ SUBROUTINE d3_readin()
USE noncollin_module, ONLY : noncolin
USE io_files, ONLY : tmp_dir, prefix
USE io_global, ONLY : ionode, ionode_id
USE mp_global, ONLY : nbgrp, get_ntask_groups
USE mp_bands, ONLY : nbgrp, ntask_groups
USE mp, ONLY : mp_bcast
USE mp_world, ONLY : world_comm
!
@ -139,16 +139,13 @@ SUBROUTINE d3_readin()
IF (okvan) CALL errore ('d3_readin', 'US not implemented', 1)
IF (noncolin) call errore('d3_readin', &
'd3 is not working in the noncolinear case', 1)
!
IF (get_ntask_groups() > 1) dffts%have_task_groups=.FALSE.
!
! band group not available
!
!
IF (ntask_groups > 1) dffts%have_task_groups=.FALSE.
!
! band group not available
!
IF (nbgrp /=1 ) &
CALL errore('d3_readin','band parallelization not available',1)
!
! There might be other variables in the input file which describe
! partial computation of the dynamical matrix. Read them here

View File

@ -19,7 +19,7 @@ SUBROUTINE cg_readin()
USE io_files, ONLY : tmp_dir, prefix
USE io_global, ONLY : ionode, ionode_id
USE noncollin_module, ONLY : noncolin
USE mp_global, ONLY : nbgrp, get_ntask_groups
USE mp_bands, ONLY : nbgrp, ntask_groups
USE mp, ONLY : mp_bcast
USE mp_world, ONLY : world_comm
!
@ -92,13 +92,13 @@ SUBROUTINE cg_readin()
!
IF (.not. gamma_only) CALL errore('cg_readin', &
'need pw.x data file produced using Gamma tricks',1)
!
! Task groups not used.
!
IF (get_ntask_groups() > 1) dffts%have_task_groups=.FALSE.
!
! band group not available
!
!
! Task groups not used.
!
IF (ntask_groups > 1) dffts%have_task_groups=.FALSE.
!
! band group not available
!
IF (nbgrp /=1 ) &
CALL errore('cg_readin','band parallelization not available',1)

View File

@ -30,7 +30,7 @@ SUBROUTINE ch_psi_all (n, h, ah, e, ik, m)
USE eqv, ONLY : evq
USE qpoint, ONLY : ikqs
USE mp_global, ONLY : intra_bgrp_comm, get_ntask_groups
USE mp_bands, ONLY : intra_bgrp_comm, ntask_groups
USE mp, ONLY : mp_sum
!Needed only for TDDFPT
@ -66,10 +66,10 @@ SUBROUTINE ch_psi_all (n, h, ah, e, ik, m)
INTEGER, ALLOCATABLE :: ibuf(:)
CALL start_clock ('ch_psi')
!
! This routine is task groups aware
!
IF (get_ntask_groups() > 1) dffts%have_task_groups=.TRUE.
!
! This routine is task groups aware
!
IF (ntask_groups > 1) dffts%have_task_groups=.TRUE.
ALLOCATE (ps ( nbnd , m))
ALLOCATE (hpsi( npwx*npol , m))

View File

@ -275,7 +275,8 @@ SUBROUTINE elphel (irr, npe, imode0, dvscfins)
USE control_ph, ONLY : trans, lgamma, current_iq
USE ph_restart, ONLY : ph_writefile
USE spin_orb, ONLY : domag
USE mp_global, ONLY: intra_bgrp_comm, npool, get_ntask_groups
USE mp_bands, ONLY: intra_bgrp_comm, ntask_groups
USE mp_pools, ONLY: npool
USE mp, ONLY: mp_sum
IMPLICIT NONE
@ -291,7 +292,7 @@ SUBROUTINE elphel (irr, npe, imode0, dvscfins)
COMPLEX(DP), EXTERNAL :: zdotc
!
IF (.NOT. comp_elph(irr) .OR. done_elph(irr)) RETURN
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
ALLOCATE (aux1 (dffts%nnr, npol))
ALLOCATE (elphmat ( nbnd , nbnd , npe))
@ -359,7 +360,7 @@ SUBROUTINE elphel (irr, npe, imode0, dvscfins)
!
! calculate dvscf_q*psi_k
!
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
IF ( dffts%have_task_groups ) THEN
IF (noncolin) THEN
CALL tg_cgather( dffts, dvscfins(:,1,ipert), tg_dv(:,1))
@ -432,7 +433,7 @@ SUBROUTINE elphel (irr, npe, imode0, dvscfins)
DEALLOCATE (elphmat)
DEALLOCATE (aux1)
DEALLOCATE (aux2)
IF ( get_ntask_groups() > 1) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1) dffts%have_task_groups=.TRUE.
IF ( dffts%have_task_groups ) THEN
DEALLOCATE( tg_dv )
DEALLOCATE( tg_psic )

View File

@ -25,7 +25,7 @@ subroutine incdrhoscf (drhoscf, weight, ik, dbecsum, dpsi)
USE wavefunctions_module, ONLY: evc
USE qpoint, ONLY : npwq, igkq, ikks
USE control_ph, ONLY : nbnd_occ
USE mp_global, ONLY : me_bgrp, inter_bgrp_comm, get_ntask_groups
USE mp_bands, ONLY : me_bgrp, inter_bgrp_comm, ntask_groups
USE mp, ONLY : mp_sum
@ -55,7 +55,7 @@ subroutine incdrhoscf (drhoscf, weight, ik, dbecsum, dpsi)
! counters
call start_clock ('incdrhoscf')
IF (get_ntask_groups() > 1) dffts%have_task_groups=.TRUE.
IF (ntask_groups > 1) dffts%have_task_groups=.TRUE.
allocate (dpsic( dffts%nnr))
allocate (psi ( dffts%nnr))
wgt = 2.d0 * weight / omega

View File

@ -28,7 +28,7 @@ subroutine incdrhoscf_nc (drhoscf, weight, ik, dbecsum, dpsi)
USE wavefunctions_module, ONLY: evc
USE qpoint, ONLY : npwq, igkq, ikks
USE control_ph, ONLY : nbnd_occ
USE mp_global, ONLY : me_bgrp, inter_bgrp_comm, get_ntask_groups
USE mp_bands, ONLY : me_bgrp, inter_bgrp_comm, ntask_groups
USE mp, ONLY : mp_sum
implicit none
@ -61,7 +61,7 @@ subroutine incdrhoscf_nc (drhoscf, weight, ik, dbecsum, dpsi)
! counters
call start_clock ('incdrhoscf')
IF (get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF (ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
allocate (dpsic(dffts%nnr, npol))
allocate (psi (dffts%nnr, npol))
allocate (aux(dfftp%nnr,nspin_mag))

View File

@ -57,10 +57,10 @@ SUBROUTINE phq_readin()
USE io_global, ONLY : meta_ionode, ionode, ionode_id, stdout
USE mp_images, ONLY : nimage, my_image_id, intra_image_comm, &
me_image, nproc_image
USE mp_global, ONLY : get_ntask_groups, nproc_pool_file, &
USE mp_global, ONLY : nproc_pool_file, &
nproc_bgrp_file, nproc_image_file
USE mp_pools, ONLY : nproc_pool, npool
USE mp_bands, ONLY : nproc_bgrp
USE mp_bands, ONLY : nproc_bgrp, ntask_groups
USE paw_variables, ONLY : okpaw
USE ramanm, ONLY : eth_rps, eth_ns, lraman, elop, dek
USE freq_ph, ONLY : fpol, fiu, nfs
@ -599,11 +599,10 @@ SUBROUTINE phq_readin()
IF (nproc_pool /= nproc_pool_file .and. .not. twfcollect) &
CALL errore('phq_readin',&
'pw.x run with a different number of pools. Use wf_collect=.true.',1)
!
! Task groups not used in phonon. Activated only in some places
!
IF (get_ntask_groups() > 1) dffts%have_task_groups=.FALSE.
!
! Task groups not used in phonon. Activated only in some places
!
IF (ntask_groups > 1) dffts%have_task_groups=.FALSE.
IF (nproc_bgrp_file /= nproc_bgrp .AND. .NOT. twfcollect) &
CALL errore('phq_readin','pw.x run with different band parallelization',1)

View File

@ -32,7 +32,7 @@ SUBROUTINE run_nscf(do_band, iq)
USE grid_irr_iq, ONLY : done_bands
USE acfdtest, ONLY : acfdt_is_active, acfdt_num_der, ir_point, delta_vrs
USE scf, ONLY : vrs
USE mp_global, ONLY : get_ntask_groups
USE mp_bands, ONLY : ntask_groups
!
IMPLICIT NONE
@ -111,12 +111,12 @@ SUBROUTINE run_nscf(do_band, iq)
!
bands_computed=.TRUE.
!
! PWscf has run with task groups if available, but in the phonon
! they are not used, apart in particular points. In that case it is
! activated.
!
IF (get_ntask_groups()>1) dffts%have_task_groups=.FALSE.
!
! PWscf has run with task groups if available, but in the phonon
! they are not used, apart in particular points, where they are
! activated.
!
IF (ntask_groups > 1) dffts%have_task_groups=.FALSE.
!
CALL stop_clock( 'PWSCF' )
!

View File

@ -54,8 +54,8 @@ subroutine solve_e
USE qpoint, ONLY : npwq, nksq
USE recover_mod, ONLY : read_rec, write_rec
USE mp_global, ONLY : inter_pool_comm, intra_bgrp_comm, &
get_ntask_groups
USE mp_pools, ONLY : inter_pool_comm
USE mp_bands, ONLY : intra_bgrp_comm, ntask_groups
USE mp, ONLY : mp_sum
implicit none
@ -96,10 +96,10 @@ subroutine solve_e
external ch_psi_all, cg_psi
call start_clock ('solve_e')
!
! This routine is task group aware
!
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
!
! This routine is task group aware
!
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
allocate (dvscfin( dfftp%nnr, nspin_mag, 3))
if (doublegrid) then
@ -219,7 +219,7 @@ subroutine solve_e
! calculates dvscf_q*psi_k in G_space, for all bands, k=kpoint
! dvscf_q from previous iteration (mix_potential)
!
IF ( get_ntask_groups() > 1) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1) dffts%have_task_groups=.TRUE.
IF( dffts%have_task_groups ) THEN
IF (noncolin) THEN
CALL tg_cgather( dffts, dvscfins(:,1,ipol), &
@ -439,7 +439,7 @@ subroutine solve_e
deallocate (dvscfin)
if (noncolin) deallocate(dbecsum_nc)
deallocate(aux2)
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
IF ( dffts%have_task_groups ) THEN
!
DEALLOCATE( tg_dv )

View File

@ -64,8 +64,8 @@ SUBROUTINE solve_linter (irr, imode0, npe, drhoscf)
USE dfile_autoname, ONLY : dfile_name
USE save_ph, ONLY : tmp_dir_save
! used only to write the restart file
USE mp_global, ONLY : inter_pool_comm, intra_bgrp_comm, &
get_ntask_groups, me_bgrp
USE mp_pools, ONLY : inter_pool_comm
USE mp_bands, ONLY : intra_bgrp_comm, ntask_groups, me_bgrp
USE mp, ONLY : mp_sum
!
implicit none
@ -141,7 +141,7 @@ SUBROUTINE solve_linter (irr, imode0, npe, drhoscf)
!
! This routine is task group aware
!
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
allocate (dvscfin ( dfftp%nnr , nspin_mag , npe))
if (doublegrid) then
@ -298,7 +298,7 @@ SUBROUTINE solve_linter (irr, imode0, npe, drhoscf)
! dvscf_q from previous iteration (mix_potential)
!
call start_clock ('vpsifft')
IF ( get_ntask_groups() > 1 ) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1 ) dffts%have_task_groups=.TRUE.
IF( dffts%have_task_groups ) THEN
IF (noncolin) THEN
CALL tg_cgather( dffts, dvscfins(:,1,ipert), &
@ -598,7 +598,7 @@ SUBROUTINE solve_linter (irr, imode0, npe, drhoscf)
if (doublegrid) deallocate (dvscfins)
deallocate (dvscfin)
deallocate(aux2)
IF ( get_ntask_groups() > 1) dffts%have_task_groups=.TRUE.
IF ( ntask_groups > 1) dffts%have_task_groups=.TRUE.
IF ( dffts%have_task_groups ) THEN
DEALLOCATE( tg_dv )
DEALLOCATE( tg_psic )

View File

@ -30,7 +30,7 @@ SUBROUTINE add_bfield (v,rho)
USE cell_base, ONLY : omega
USE fft_base, ONLY : dfftp
USE lsda_mod, ONLY : nspin
USE mp_global, ONLY : intra_bgrp_comm
USE mp_bands, ONLY : intra_bgrp_comm
USE mp, ONLY : mp_sum
USE noncollin_module, ONLY : bfield, lambda, i_cons, mcons, &
pointlist, factlist, noncolin

View File

@ -54,7 +54,8 @@ SUBROUTINE add_efield(vpoten,etotefield,rho,iflag)
USE io_global, ONLY : stdout,ionode
USE control_flags, ONLY : mixing_beta
USE lsda_mod, ONLY : nspin
USE mp_global, ONLY : intra_image_comm, me_bgrp, intra_bgrp_comm
USE mp_images, ONLY : intra_image_comm
USE mp_bands, ONLY : me_bgrp
USE fft_base, ONLY : dfftp
USE mp, ONLY : mp_bcast, mp_sum
USE control_flags, ONLY : iverbosity
@ -115,10 +116,7 @@ SUBROUTINE add_efield(vpoten,etotefield,rho,iflag)
CALL compute_ion_dip(emaxpos, eopreg, edir, ion_dipole)
tot_dipole = -e_dipole + ion_dipole
#ifdef __MPI
CALL mp_bcast(tot_dipole, 0, intra_image_comm)
#endif
!
! E_{TOT} = -e^{2} \left( eamp - dip \right) dip \frac{\Omega}{4\pi}
!

View File

@ -23,7 +23,7 @@ subroutine addusforce (forcenl)
USE scf, ONLY : v, vltot
USE uspp, ONLY : becsum, okvan
USE uspp_param, ONLY : upf, lmaxq, nh, nhm
USE mp_global, ONLY : intra_bgrp_comm
USE mp_bands, ONLY : intra_bgrp_comm
USE mp, ONLY : mp_sum
USE control_flags, ONLY : gamma_only
USE fft_interfaces,ONLY : fwfft
@ -122,9 +122,7 @@ subroutine addusforce (forcenl)
endif
enddo
#ifdef __MPI
call mp_sum ( ddeeq, intra_bgrp_comm )
#endif
! WRITE( stdout,'( "dmatrix atom ",i4)') na
! do ih = 1, nh(nt)
! WRITE( stdout,'(8f9.4)') (ddeeq(ipol,ih,jh,na),jh=1,nh(nt))

View File

@ -37,7 +37,7 @@ subroutine atomic_rho (rhoa, nspina)
USE wavefunctions_module, ONLY : psic
USE noncollin_module, ONLY : angle1, angle2
USE uspp_param, ONLY : upf
USE mp_global, ONLY : intra_bgrp_comm
USE mp_bands, ONLY : intra_bgrp_comm
USE mp, ONLY : mp_sum
USE fft_base, ONLY : dfftp
USE fft_interfaces, ONLY : invfft

View File

@ -15,9 +15,9 @@ SUBROUTINE data_structure( gamma_only )
!
USE kinds, ONLY : DP
USE mp, ONLY : mp_max
USE mp_global, ONLY : me_bgrp, nproc_bgrp, root_bgrp, intra_bgrp_comm, &
inter_pool_comm
USE mp_global, ONLY : get_ntask_groups
USE mp_bands, ONLY : me_bgrp, nproc_bgrp, root_bgrp, intra_bgrp_comm, &
ntask_groups
USE mp_pools, ONLY : inter_pool_comm
USE fft_base, ONLY : dfftp, dffts
USE cell_base, ONLY : bg, tpiba
USE klist, ONLY : xk, nks
@ -29,7 +29,7 @@ SUBROUTINE data_structure( gamma_only )
IMPLICIT NONE
LOGICAL, INTENT(in) :: gamma_only
REAL (DP) :: gkcut
INTEGER :: ik, ngm_, ngs_, ngw_, nogrp
INTEGER :: ik, ngm_, ngs_, ngw_
!
! ... calculate gkcut = max |k+G|^2, in (2pi/a)^2 units
!
@ -56,11 +56,9 @@ SUBROUTINE data_structure( gamma_only )
!
! ... set up fft descriptors, including parallel stuff: sticks, planes, etc.
!
nogrp = get_ntask_groups()
!
CALL pstickset( gamma_only, bg, gcutm, gkcut, gcutms, &
dfftp, dffts, ngw_ , ngm_ , ngs_ , me_bgrp, &
root_bgrp, nproc_bgrp, intra_bgrp_comm, nogrp )
root_bgrp, nproc_bgrp, intra_bgrp_comm, ntask_groups )
!
! on output, ngm_ and ngs_ contain the local number of G-vectors
! for the two grids. Initialize local and global number of G-vectors

View File

@ -1,4 +1,3 @@
!
! Copyright (C) 2001-2012 Quantum ESPRESSO group
! This file is distributed under the terms of the
@ -17,9 +16,8 @@ SUBROUTINE data_structure_custom(fc, gamma_only)
USE cell_base, ONLY : bg, tpiba, tpiba2
USE klist, ONLY : xk, nks
USE mp, ONLY : mp_sum, mp_max,mp_barrier
USE mp_global, ONLY : me_bgrp, nproc_bgrp, inter_bgrp_comm, &
intra_bgrp_comm, root_bgrp
USE mp_global, ONLY : get_ntask_groups
USE mp_bands, ONLY : me_bgrp, nproc_bgrp, inter_bgrp_comm, &
intra_bgrp_comm, root_bgrp, ntask_groups
USE stick_set, ONLY : pstickset_custom
USE fft_custom, ONLY : fft_cus, gvec_init
USE fft_base, ONLY : dfftp
@ -50,26 +48,24 @@ SUBROUTINE data_structure_custom(fc, gamma_only)
inter_comm = inter_bgrp_comm
intra_comm = intra_bgrp_comm
root = root_bgrp
nogrp = ntask_groups
nogrp = get_ntask_groups()
IF (nks == 0) THEN
!
! if k-points are automatically generated (which happens later)
! use max(bg)/2 as an estimate of the largest k-point
!
gkcut = 0.5d0 * MAX ( &
IF (nks == 0) THEN
!
! if k-points are automatically generated (which happens later)
! use max(bg)/2 as an estimate of the largest k-point
!
gkcut = 0.5d0 * MAX ( &
&SQRT (SUM(bg (1:3, 1)**2) ), &
&SQRT (SUM(bg (1:3, 2)**2) ), &
&SQRT (SUM(bg (1:3, 3)**2) ) )
ELSE
gkcut = 0.0d0
DO kpoint = 1, nks
gkcut = MAX (gkcut, SQRT ( SUM(xk (1:3, kpoint)**2) ) )
ENDDO
ENDIF
gkcut = (SQRT (fc%ecutt) / tpiba + gkcut)**2
ELSE
gkcut = 0.0d0
DO kpoint = 1, nks
gkcut = MAX (gkcut, SQRT ( SUM(xk (1:3, kpoint)**2) ) )
ENDDO
ENDIF
gkcut = (SQRT (fc%ecutt) / tpiba + gkcut)**2
!
! ... find maximum value among all the processors
!
@ -77,11 +73,9 @@ SUBROUTINE data_structure_custom(fc, gamma_only)
!
! ... set up fft descriptors, including parallel stuff: sticks, planes, etc.
!
nogrp = get_ntask_groups()
!
CALL pstickset_custom( gamma_only, bg, gcutm, gkcut, fc%gcutmt, &
dfftp, fc%dfftt, ngw_ , ngm_, ngs_, me, root, nproc, intra_comm, &
nogrp )
dfftp, fc%dfftt, ngw_ , ngm_, ngs_, me, root, nproc, &
intra_comm, nogrp )
!
! on output, ngm_ and ngs_ contain the local number of G-vectors
! for the two grids. Initialize local and global number of G-vectors

View File

@ -118,13 +118,13 @@ MODULE pw_restart
USE extfield, ONLY : tefield, dipfield, edir, &
emaxpos, eopreg, eamp
USE io_rho_xml, ONLY : write_rho
USE mp_global, ONLY : get_ntask_groups
USE mp_world, ONLY : nproc
USE mp_images, ONLY : nproc_image
USE mp_pools, ONLY : kunit, nproc_pool, me_pool, root_pool, &
intra_pool_comm, inter_pool_comm
USE mp_bands, ONLY : nproc_bgrp, me_bgrp, root_bgrp, &
intra_bgrp_comm, inter_bgrp_comm, nbgrp
intra_bgrp_comm, inter_bgrp_comm, &
nbgrp, ntask_groups
USE mp_pots, ONLY : nproc_pot
USE mp_diag, ONLY : nproc_ortho
USE funct, ONLY : get_exx_fraction, dft_is_hybrid, &
@ -148,7 +148,7 @@ MODULE pw_restart
CHARACTER(LEN=256) :: dirname, filename
INTEGER :: i, ig, ik, ngg, ierr, ipol, ik_eff, num_k_points
INTEGER :: npool, nkbl, nkl, nkr, npwx_g
INTEGER :: ike, iks, npw_g, ispin, inlc, ntask_groups
INTEGER :: ike, iks, npw_g, ispin, inlc
INTEGER, ALLOCATABLE :: ngk_g(:)
INTEGER, ALLOCATABLE :: igk_l2g(:,:), igk_l2g_kdip(:,:), mill_g(:,:)
LOGICAL :: lwfc, lrho
@ -451,7 +451,6 @@ MODULE pw_restart
! ... PARALLELISM
!-------------------------------------------------------------------------------
!
ntask_groups=get_ntask_groups()
CALL write_para( kunit, nproc, nproc_pool, nproc_image, ntask_groups,&
nproc_pot, nproc_bgrp, nproc_ortho )
!