Solved a problem with world_comm that was referenced in mp_global in an obscure way.



git-svn-id: http://qeforge.qe-forge.org/svn/q-e/trunk/espresso@13638 c92efa57-630b-4861-b058-cf58834340f0
This commit is contained in:
degironc 2017-07-28 12:32:24 +00:00
parent 0a215ff87b
commit 3e20b43121
5 changed files with 33 additions and 13 deletions

View File

@ -174,7 +174,6 @@ mp_bands.o : ../UtilXlib/mp.o
mp_bands.o : ../UtilXlib/parallel_include.o
mp_diag.o : ../UtilXlib/mp.o
mp_diag.o : ../UtilXlib/parallel_include.o
mp_diag.o : mp_world.o
mp_exx.o : ../UtilXlib/mp.o
mp_exx.o : ../UtilXlib/parallel_include.o
mp_exx.o : io_global.o

View File

@ -39,15 +39,13 @@ MODULE mp_diag
!
INTEGER :: world_cntx = -1 ! BLACS context of all processors
INTEGER :: ortho_cntx = -1 ! BLACS context for ortho_comm
INTEGER :: world_comm = -1 ! internal copy of the world_comm (-1 is unset, should be set to MPI_COMM_WORLD)
INTEGER :: mpime = 0 ! the global MPI task index (used in clocks) can be set with a mp_rank call
!
LOGICAL :: do_distr_diag_inside_bgrp = .true. ! whether the distributed diagonalization should be performed
! at the band group level (bgrp) or at its parent level
CONTAINS
!
!----------------------------------------------------------------------------
SUBROUTINE mp_start_diag( ndiag_, parent_comm )
SUBROUTINE mp_start_diag( ndiag_, parent_comm, do_distr_diag_inside_bgrp_ )
!---------------------------------------------------------------------------
!
! ... Ortho/diag/linear algebra group initialization
@ -57,6 +55,10 @@ CONTAINS
INTEGER, INTENT(INOUT) :: ndiag_ ! (IN) input number of procs in the diag group, (OUT) actual number
INTEGER, INTENT(IN) :: parent_comm ! parallel communicator inside which the distributed linear algebra group
! communicators are created
LOGICAL, INTENT(IN) :: do_distr_diag_inside_bgrp_ ! as the name suggests: whether to perform the distributed diagonalization inside the band group
!
INTEGER :: world_comm = -1 ! internal copy of the world_comm (-1 is unset, should be set to MPI_COMM_WORLD)
INTEGER :: mpime = 0 ! the global MPI task index (used in clocks) can be set with a mp_rank call
!
INTEGER :: nproc_ortho_try
INTEGER :: parent_nproc ! nproc of the parent group
@ -65,6 +67,7 @@ CONTAINS
INTEGER :: nparent_comm ! number of parent communicators
INTEGER :: ierr = 0
!
write (*,*) 'world_comm ', world_comm , MPI_COMM_WORLD
world_comm = MPI_COMM_WORLD ! set the internal copy of the world_comm to be possibly used in other related routines
world_nproc = mp_size( world_comm ) ! the global number of processors in world_comm
mpime = mp_rank( world_comm ) ! set the global MPI task index (used in clocks)
@ -72,6 +75,9 @@ CONTAINS
my_parent_id = mpime / parent_nproc ! set the index of the current parent communicator
nparent_comm = world_nproc/parent_nproc ! number of parent communicators
! save input value inside the module
do_distr_diag_inside_bgrp = do_distr_diag_inside_bgrp_
!
#if defined __SCALAPACK
np_blacs = mp_size( world_comm )

View File

@ -17,7 +17,7 @@ MODULE mp_global
! ... belonging to each of the various parallelization levels:
! ... use the specific modules instead
!
USE mp_world, ONLY: mp_world_start, mp_world_end
USE mp_world, ONLY: world_comm, mp_world_start, mp_world_end
USE mp_images
USE mp_pools
USE mp_bands
@ -63,6 +63,7 @@ CONTAINS
LOGICAL :: do_diag_in_band
INTEGER :: my_comm, num_groups, group_id
INTEGER :: what_band_group_
LOGICAL :: do_distr_diag_inside_bgrp
!
my_comm = MPI_COMM_WORLD
IF ( PRESENT(my_world_comm) ) my_comm = my_world_comm
@ -96,14 +97,26 @@ CONTAINS
do_diag_in_band = .FALSE.
IF ( PRESENT(diag_in_band_group) ) do_diag_in_band = diag_in_band_group
!
do_distr_diag_inside_bgrp = (negrp.gt.1) .or. do_diag_in_band
if ( do_distr_diag_inside_bgrp ) then
IF( negrp.gt.1 ) THEN
! if using exx groups from mp_exx, revert to the old diag method
num_groups = npool_*nimage_
group_id = my_pool_id + my_image_id * npool_
my_comm = intra_bgrp_comm
else
ELSE IF( do_diag_in_band ) THEN
! used to be one diag group per bgrp
! with strict hierarchy: POOL > BAND > DIAG
num_groups = npool_* nimage_ * nband_
group_id = my_bgrp_id + (my_pool_id + my_image_id * npool_ ) * nband_
my_comm = intra_bgrp_comm
ELSE
! one diag group per pool ( individual k-point level )
! with band group and diag group both being children of POOL comm
num_groups = npool_* nimage_
group_id = my_pool_id + my_image_id * npool_
my_comm = intra_pool_comm
end if
CALL mp_start_diag ( ndiag_, my_comm )
END IF
do_distr_diag_inside_bgrp = (negrp.gt.1) .or. do_diag_in_band
CALL mp_start_diag ( ndiag_, my_comm, do_distr_diag_inside_bgrp )
!
RETURN
!

View File

@ -1,3 +1,5 @@
md5.o :
md5_from_file.o :
memstat.o : ../include/c_defs.h
ptrace.o : ../include/c_defs.h
qmmm_aux.o : ../include/c_defs.h

View File

@ -58,7 +58,7 @@ for dir in $dirs; do
DEPEND3="$LEVEL2/include $LEVEL2/FFTXlib $LEVEL2/LAXlib $LEVEL2/UtilXlib"
case $DIR in
Modules )
DEPENDS="$DEPEND1" ;;
DEPENDS="$DEPEND1 $LEVEL1/UtilXlib" ;;
upftools )
DEPENDS="$DEPEND1 $LEVEL1/Modules" ;;
LR_Modules )