ifdef preprocessor macro refactoring (PWCOND)

git-svn-id: http://qeforge.qe-forge.org/svn/q-e/trunk/espresso@12969 c92efa57-630b-4861-b058-cf58834340f0
spigafi 2016-09-17 14:34:08 +00:00
parent 4ca678b7fb
commit c0af04e2f8
7 changed files with 13 additions and 13 deletions
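
Every hunk below applies the same one-line change: a bare #ifdef __MPI guard
becomes the equivalent #if defined(__MPI). A minimal sketch of the difference,
reusing the mp_startup call from the diff (__EXTRA_FLAG is hypothetical, shown
only to illustrate why the defined() form is preferred):

! Before: #ifdef tests exactly one macro and cannot be combined.
#ifdef __MPI
      CALL mp_startup ( )
#endif

! After: equivalent for a single macro, but defined() also composes
! with && and || (__EXTRA_FLAG is a hypothetical second switch):
#if defined(__MPI) && !defined(__EXTRA_FLAG)
      CALL mp_startup ( )
#endif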

@@ -21,7 +21,7 @@ program pwcond
call do_cond (alldone)
-#ifdef __MPI
+#if defined(__MPI)
CALL mp_global_end()
#endif

@@ -70,7 +70,7 @@ SUBROUTINE do_cond(done)
!
! initialise environment
!
-#ifdef __MPI
+#if defined(__MPI)
CALL mp_startup ( )
#endif
CALL environment_start ( 'PWCOND' )
@@ -187,7 +187,7 @@ SUBROUTINE do_cond(done)
!
END IF
-#ifdef __MPI
+#if defined(__MPI)
IF (npool > 1) CALL errore('pwcond','pools not implemented',npool)
ik = IAND ( nproc, nproc-1 )
IF ( nproc /= 1 .AND. ik /= 0 ) &

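An aside on the context lines in the hunk above (untouched by this commit):
IAND(nproc, nproc-1) clears the lowest set bit of nproc, so it is zero exactly
when nproc is a power of two; the error check that follows (the errore call
continues past the excerpt) rejects any nproc that is neither 1 nor a power
of two.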
@@ -175,7 +175,7 @@ subroutine local_1 (edummy, nrz, vppot, n2d, psibase)
INTEGER :: i, il, j, jl, ig, jg, ipol, &
idx, number, nprob, nteam, nteamnow, &
info, kin, kfin, is, js
-#ifdef __MPI
+#if defined(__MPI)
INTEGER :: status(MPI_STATUS_SIZE)
#endif
INTEGER, ALLOCATABLE :: fftxy(:,:)
@@ -246,7 +246,7 @@ subroutine local_1 (edummy, nrz, vppot, n2d, psibase)
CALL hev_ab(ngper*npol, amat, ngper*npol, el, psiprob, &
-1.d1, edummy+ewind, nprob)
-#ifdef __MPI
+#if defined(__MPI)
IF ( me_pool.ne.root_pool ) THEN
CALL mpi_send(nprob,1,MPI_INTEGER,0,17, &
MPI_COMM_WORLD,info )
@@ -279,7 +279,7 @@ subroutine local_1 (edummy, nrz, vppot, n2d, psibase)
ENDDO
-#ifdef __MPI
+#if defined(__MPI)
CALL mp_barrier(world_comm)
CALL mp_bcast(n2d,ionode_id, world_comm)
CALL mp_bcast(psibase,ionode_id, world_comm)
@@ -390,7 +390,7 @@ subroutine local_2(nrz, nrzp, vppot, psiper, zkr)
ENDDO
-#ifdef __MPI
+#if defined(__MPI)
CALL mp_barrier(world_comm)
#endif

@@ -96,7 +96,7 @@ ENDIF
! To form local potential on the real space mesh
!
!
-#ifdef __MPI
+#if defined(__MPI)
allocate ( allv(dfftp%nr1x*dfftp%nr2x*dfftp%nr3x) )
#endif
@@ -114,7 +114,7 @@ DO ispin=1,nspin_eff
!
! To collect the potential from different CPUs
!
-#ifdef __MPI
+#if defined(__MPI)
call gather_grid ( dfftp, auxr, allv )
CALL mp_bcast( allv, ionode_id, world_comm )
aux(:) = CMPLX(allv(:), 0.d0,kind=DP)
@@ -188,7 +188,7 @@ ENDIF
DEALLOCATE(auxr)
DEALLOCATE(amat)
DEALLOCATE(amat0)
-#ifdef __MPI
+#if defined(__MPI)
deallocate(allv)
#endif

@@ -59,7 +59,7 @@ SUBROUTINE rotproc (fun0, fund0, fun1, fund1, funl0, fundl0, funl1, &
COMPLEX(DP), ALLOCATABLE :: x(:), y(:), amat(:,:), vec(:,:), &
amat_aux(:,:), vec_aux(:,:)
-#ifdef __MPI
+#if defined(__MPI)
IF(nproc.EQ.1) RETURN

@@ -461,7 +461,7 @@ SUBROUTINE scat_states_comp(nchan, nrzp, norb, nocros, taunew, vec, &
rho%of_r(:,:) = 0.d0
call realus_scatt_1(becsum_orig)
do ipol = 1, nspin
-#ifdef __MPI
+#if defined(__MPI)
CALL gather_grid (dfftp, rho%of_r(:,ipol),spin_mag(:,ipol))
#else
do ig = 1, dfftp%nnr

@@ -490,7 +490,7 @@ subroutine scatter_forw(nrz, nrzp, z, psiper, zk, norb, tblm, cros, &
! To construct the functions in the whole region zin<z<zfin in the
! case of multiparallel running
!
-#ifdef __MPI
+#if defined(__MPI)
CALL rotproc(fun0, fund0, fun1, fund1, funl0, fundl0, funl1, &
fundl1, intw1, intw2, n2d, norbf, norb, nrzp)
#endif
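
For reference, __MPI is never defined in these sources themselves; it comes
from the build configuration. In a parallel Quantum ESPRESSO build of this era
the flag would typically be set in make.sys (later renamed make.inc), roughly
as follows (a sketch of a typical setting, not part of this commit):

MPIF90 = mpif90
DFLAGS = -D__MPI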