Update nightly testing versions

Paul Kent 2021-03-24 14:06:54 -04:00
parent baf31d3b8b
commit 4b8f5c1b2c
7 changed files with 230 additions and 75 deletions

View File

@@ -24,23 +24,23 @@ performance and easiest configuration.
Nightly testing currently includes the following software versions on x86:
* Compilers
* GCC 10.2.0, 7.3.0
* Clang/LLVM 10.0.1, 6.0.1
* GCC 10.2.0, 8.3.0
* Clang/LLVM 11.0.1
* Intel 19.1.1.217 configured to use C++ library from GCC 8.3.0
* PGI 19.4 configured to use C++ library from GCC 8.3.0
* Boost 1.74.0, 1.68.0
* HDF5 1.10.5, 1.8.19
* PGI/NVIDIA HPC SDK 20.9 configured to use C++ library from GCC 8.3.0
* Boost 1.75.0, 1.68.0
* HDF5 1.12.0, 1.8.19
* FFTW 3.3.8, 3.3.4
* CMake 3.18.2, 3.12.1
* CMake 3.19.5, 3.13.2
* MPI
* OpenMPI 4.0.4, 3.1.2
* OpenMPI 4.1.0, 3.1.6
* Intel MPI 19.1.1.217
* CUDA 10.2.89
* CUDA 11.2.1
Workflow tests are performed with Quantum Espresso v6.4.1 and PySCF v1.7.4. These check trial wavefunction generation and
Workflow tests are performed with Quantum Espresso v6.7.0 and PySCF v1.7.5. These check trial wavefunction generation and
conversion through to actual QMC runs.
On a developmental basis we also check the latest Clang development version, AMD AOMP and Intel OneAPI compilers.
On a developmental basis we also check the latest Clang and GCC development versions, AMD AOMP and Intel OneAPI compilers.
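These versions are pinned centrally in ornl_versions.sh (diffed near the end of this commit). As a minimal sketch of how the list maps onto that script's variables (only cmake_vnew, cmake_vold, and hdf5_vold appear verbatim in the diff below; the remaining assignments are assumed to follow the same pattern):

gcc_vnew=10.2.0     # newest GCC tested
gcc_vold=8.3.0      # oldest GCC tested
boost_vnew=1.75.0
hdf5_vnew=1.12.0    # hdf5_vold=1.8.19 per the versions file
fftw_vnew=3.3.8
ompi_vnew=4.1.0
cmake_vnew=3.19.5   # Released 2021-02-15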
# Building with CMake

View File

@@ -23,14 +23,18 @@ esac
if [[ $localonly == "yes" ]]; then
echo --- Local CMake/Make/CTest only. No cdash drop.
echo --- Local CMake/Make/CTest only. No cdash drop.
fi
if [ -e `dirname "$0"`/ornl_update.sh ]; then
echo --- Updates
source `dirname "$0"`/ornl_update.sh
if [[ $jobtype == "weekly" ]]; then
if [ -e `dirname "$0"`/ornl_update.sh ]; then
echo --- Running compiler updates
source `dirname "$0"`/ornl_update.sh
fi
fi
if [ -e `dirname "$0"`/ornl_versions.sh ]; then
source `dirname "$0"`/ornl_versions.sh
else
@@ -62,16 +66,16 @@ echo --- Host is $ourhostname
case "$ourhostname" in
sulfur )
if [[ $jobtype == "nightly" ]]; then
buildsys="build_gccnew build_intel2020_nompi build_intel2020 build_intel2020_complex build_intel2020_mixed build_intel2020_complex_mixed build_gccnew_nompi_mkl build_gccold_nompi_mkl build_clangnew_nompi_mkl build_gccnew_nompi build_clangnew_nompi build_gccnew_mkl build_gccnew_mkl_complex build_clangnew_mkl build_clangnew_mkl_complex build_clangnew_mkl_mixed build_gcclegacycuda build_gcclegacycuda_complex build_gcclegacycuda_full build_pgi2020_nompi build_clangdev_nompi_mkl"
buildsys="build_gccnew build_intel2020_nompi build_intel2020 build_intel2020_complex build_intel2020_mixed build_intel2020_complex_mixed build_gccnew_nompi_mkl build_gccold_nompi_mkl build_clangnew_nompi_mkl build_gccnew_nompi build_clangnew_nompi build_gccnew_mkl build_gccnew_mkl_complex build_gccdev_mkl build_gccdev_mkl_complex build_clangnew_mkl build_clangnew_mkl_complex build_clangnew_mkl_mixed build_gcclegacycuda build_gcclegacycuda_complex build_gcclegacycuda_full build_pgi2020_nompi build_clangdev_nompi_mkl build_clangdev_nompi_mkl_complex build_clangdev_offloadcuda_nompi_mkl build_clangdev_offloadcuda_nompi_mkl_complex build_gccnew_debug_mkl build_gccnew_debug_complex_mkl"
else
buildsys="build_gccnew_mkl_nompi build_clangnew_mkl_nompi build_intel2020_nompi build_intel2020 build_intel2020_complex build_intel2020_mixed build_intel2020_complex_mixed build_gcclegacycuda build_gcclegacycuda_complex build_pgi2020_nompi"
fi
;;
nitrogen )
if [[ $jobtype == "nightly" ]]; then
buildsys="build_gccnew build_pgi2020_nompi build_clangdev_nompi build_clangdev_offloadcuda_nompi build_gccnew_nompi build_gccnew_nompi_complex build_clangnew build_clangnew_complex build_clangnew_mixed build_clangnew_complex_mixed build_aompnew_nompi build_aompnew build_aompnew_nompi_mixed build_aompnew_mixed build_aompnew_nompi_complex_mixed build_aompnew_complex_mixed build_aompnew_nompi_complex build_aompnew_complex build_gcclegacycuda build_gcclegacycuda_full build_gcclegacycuda_complex build_gccnew_complex"
buildsys="build_gccnew build_pgi2020_nompi build_clangdev_nompi build_clangdev_offloadcuda_nompi build_gccnew_nompi build_gccnew_nompi_complex build_clangnew build_clangnew_complex build_clangnew_mixed build_clangnew_complex_mixed build_clangdev_offloadcuda_nompi_complex build_clangdev_offloadcuda_nompi_mixed build_clangdev_offloadcuda_nompi_complex_mixed build_aompnew_nompi build_aompnew build_aompnew_nompi_mixed build_aompnew_mixed build_aompnew_nompi_complex_mixed build_aompnew_complex_mixed build_aompnew_nompi_complex build_aompnew_complex build_gcclegacycuda build_gcclegacycuda_full build_gcclegacycuda_complex build_gccnew_complex build_gccdev build_gccdev_complex"
else
buildsys="build_gccnew build_pgi2020_nompi build_aompnew_mixed build_gcclegacycuda build_gcclegacycuda_complex build_gccnew_complex build_clangnew build_clangdev_offloadcuda_nompi"
buildsys="build_gccnew build_pgi2020_nompi build_aompnew_mixed build_gcclegacycuda build_gcclegacycuda_complex build_gccnew_complex build_clangnew build_clangdev_offloadcuda_nompi build_clangdev_offloadcuda_nompi_complex"
fi
;;
* )
@@ -114,6 +118,9 @@ export OMP_NUM_THREADS=16
#export FI_PROVIDER=sockets
export I_MPI_FABRICS=shm
# LLVM Offload bug workaround 2021-03-02
export LIBOMP_USE_HIDDEN_HELPER_TASK=OFF
module() { eval `/usr/bin/modulecmd bash $*`; }
export SPACK_ROOT=$HOME/apps/spack
@@ -160,7 +167,7 @@ echo --- PYTHONPATH=$PYTHONPATH
# Future improvement: use spack version or build for more compiler variants
#
export QE_VERSION=6.4.1
export QE_VERSION=6.7.0
sys=build_gccnew
# QE version 6.x unpacks to qe-; older 5.x versions use espresso-
export QE_PREFIX=qe-
@@ -261,7 +268,7 @@ mkdir build
cd build
cmake -DWITH_F12=1 -DWITH_RANGE_COULOMB=1 -DWITH_COULOMB_ERF=1 \
-DCMAKE_INSTALL_PREFIX:PATH=$here -DCMAKE_INSTALL_LIBDIR:PATH=lib ..
make
make -j 48
make install
cd ..
@@ -275,7 +282,7 @@ git checkout 4.3.4
autoreconf -i
./configure --prefix=$here --libdir=$herelib --enable-vxc --enable-fxc --enable-kxc \
--enable-shared --disable-static --enable-shared --disable-fortran LIBS=-lm
make
make -j 48
make install
cd ..
@@ -288,7 +295,7 @@ mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=1 -DXC_MAX_ORDER=3 -DXCFUN_ENABLE_TESTS=0 \
-DCMAKE_INSTALL_PREFIX:PATH=$here -DCMAKE_INSTALL_LIBDIR:PATH=lib ..
make
make -j 48
make install
cd ..
cd ..
@@ -301,7 +308,7 @@ cd pyscf/lib
mkdir build
cd build
cmake -DBUILD_LIBCINT=0 -DBUILD_LIBXC=0 -DBUILD_XCFUN=0 -DCMAKE_INSTALL_PREFIX:PATH=$here ..
make
make -j 48
echo --- PySCF build done
export PYTHONPATH=$topdir:$PYTHONPATH
export LD_LIBRARY_PATH=$herelib:$LD_LIBRARY_PATH
@@ -341,6 +348,9 @@ cd $sys
if [[ $sys == *"gccnew"* ]]; then
ourenv=gccnewbuild
fi
if [[ $sys == *"gccdev"* ]]; then
ourenv=gccdevbuild
fi
if [[ $sys == *"gccold"* ]]; then
ourenv=gccoldbuild
fi
@@ -384,12 +394,17 @@ gccnewbuild) echo $ourenv
spack load --first boost@$boost_vnew%gcc@$gcc_vnew
spack load hdf5@$hdf5_vnew%gcc@$gcc_vnew
spack load --first cmake@$cmake_vnew%gcc@$gcc_vnew
spack load --first libxml2@$libxml2_v%gcc@$gcc_vnew
spack load --first fftw@$fftw_vnew%gcc@$gcc_vnew
if [[ $sys != *"nompi"* ]]; then
spack load openmpi@$ompi_vnew%gcc@$gcc_vnew
spack load py-mpi4py%gcc@$gcc_vnew
fi
spack load --first libxml2@$libxml2_v%gcc@$gcc_vnew
spack load --first fftw@$fftw_vnew%gcc@$gcc_vnew
spack load py-numpy%gcc@$gcc_vnew
spack load py-scipy%gcc@$gcc_vnew
spack load py-h5py%gcc@$gcc_vnew
spack load py-lxml%gcc@$gcc_vnew
spack load py-pandas%gcc@$gcc_vnew
# if [ "$ourplatform" == "AMD" ]; then
# spack load amdblis
# spack load netlib-lapack
@@ -412,6 +427,45 @@ gccnewbuild) echo $ourenv
# For debugging module availability etc. can check if afmctools are working here
#${test_dir}/qmcpack/utils/afqmctools/bin/pyscf_to_afqmc.py
;;
gccdevbuild) echo $ourenv
spack load gcc@master
spack load python%gcc@$gcc_vnew
spack load --first boost@$boost_vnew%gcc@$gcc_vnew
spack load hdf5@$hdf5_vnew%gcc@$gcc_vnew
spack load --first cmake@$cmake_vnew%gcc@$gcc_vnew
spack load --first libxml2@$libxml2_v%gcc@$gcc_vnew
spack load --first fftw@$fftw_vnew%gcc@$gcc_vnew
if [[ $sys != *"nompi"* ]]; then
spack load openmpi@$ompi_vnew%gcc@$gcc_vnew
spack load py-mpi4py%gcc@$gcc_vnew
fi
spack load py-numpy%gcc@$gcc_vnew
spack load py-scipy%gcc@$gcc_vnew
spack load py-h5py%gcc@$gcc_vnew
spack load py-lxml%gcc@$gcc_vnew
spack load py-pandas%gcc@$gcc_vnew
# if [ "$ourplatform" == "AMD" ]; then
# spack load amdblis
# spack load netlib-lapack
# else
# spack load blis
# spack load netlib-lapack
# fi
# spack load openblas%gcc@${gcc_vnew} threads=openmp
spack load openblas threads=openmp
# # Make PySCF available
# export PYSCF_BIN=$PYSCF_HOME
# export PYTHONPATH=${test_dir}/build_gccnew_pyscf/pyscf:$PYTHONPATH
# export PYTHONPATH=${test_dir}/qmcpack/utils/afqmctools/:$PYTHONPATH
# export PYTHONPATH=${test_dir}/qmcpack/src/QMCTools/:$PYTHONPATH
# export LD_LIBRARY_PATH=${test_dir}/build_gccnew_pyscf/pyscf/opt/lib:$LD_LIBRARY_PATH
# echo PYSCF_BIN=$PYSCF_HOME
# echo PYTHONPATH=$PYTHONPATH
# echo LD_LIBRARY_PATH=$LD_LIBRARY_PATH
# # For debugging module availability etc. can check if afmctools are working here
# #${test_dir}/qmcpack/utils/afqmctools/bin/pyscf_to_afqmc.py
;;
gccoldbuild) echo $ourenv
spack load gcc@$gcc_vold
spack load python%gcc@$gcc_vold
@@ -539,7 +593,11 @@ module list
# Construct test name and configure flags
# Compiler and major version, MPI or not
if [[ $sys == *"gcc"* ]]; then
compilerversion=`gcc --version|grep ^gcc|sed 's/^.* //g'|sed 's/\..*//g'`
if [[ $sys == *"gccdev"* ]]; then
compilerversion=Dev
else
compilerversion=`gcc --version|grep ^gcc|sed 's/^.* //g'|sed 's/\..*//g'`
fi
if [[ $sys == *"nompi"* ]]; then
QMCPACK_TEST_SUBMIT_NAME=GCC${compilerversion}-NoMPI
CTCFG="-DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ -DQMC_MPI=0"
@@ -557,16 +615,16 @@ fi
fi
# On sulfur with gcc builds, workaround presumed AVX512 bug
case "$ourhostname" in
sulfur )
echo "Using GCC broadwell architecture override on $ourhostname"
CTXCFG="-DCMAKE_CXX_FLAGS='-march=broadwell -O3 -DNDEBUG -fomit-frame-pointer -ffast-math';-DCMAKE_C_FLAGS='-march=broadwell -O3 -DNDEBUG -fomit-frame-pointer -ffast-math'"
;;
*)
#case "$ourhostname" in
# sulfur )
# echo "Using GCC broadwell architecture override on $ourhostname"
# CTXCFG="-DCMAKE_CXX_FLAGS='-march=broadwell -O3 -DNDEBUG -fomit-frame-pointer -ffast-math';-DCMAKE_C_FLAGS='-march=broadwell -O3 -DNDEBUG -fomit-frame-pointer -ffast-math'"
# ;;
# *)
echo "No GCC workaround used on this host"
CTXCFG=""
;;
esac
# ;;
#esac
echo $CTXCFG
fi
@@ -705,6 +763,16 @@ QMCPACK_TEST_SUBMIT_NAME=${QMCPACK_TEST_SUBMIT_NAME}-Full
CTCFG="$CTCFG -DQMC_MIXED_PRECISION=0"
fi
# SoA/AoS build (label aos only)
if [[ $sys == *"aos"* ]]; then
QMCPACK_TEST_SUBMIT_NAME=${QMCPACK_TEST_SUBMIT_NAME}-AoS
CTCFG="$CTCFG -DENABLE_SOA=0"
echo "*** ERROR: AoS Builds are deprecated as of 2020-05-19"
exit 1
else
CTCFG="$CTCFG -DENABLE_SOA=1"
fi
# Boilerplate for all tests
CTCFG="$CTCFG -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1"
@@ -721,7 +789,7 @@ fi
# Adjust which tests are run to control overall runtime
case "$sys" in
*intel2020*|*gccnew*|*clangnew*|*pgi*|*gcccuda|*aompnew_nompi_mixed) echo "Running full ("less limited") test set for $sys"
*intel2020*|*gccnew*|*clangnew*|*clangdev*|*pgi*|*gcccuda|*aompnew_nompi_mixed) echo "Running full ("less limited") test set for $sys"
THETESTS=$LESSLIMITEDTESTS
;;
*) echo "Running limited test set for $sys"
@@ -731,7 +799,15 @@ esac
#THETESTS=$LIMITEDTESTS # for DEBUG. Remove for production.
echo $THETESTS
export QMCPACK_TEST_SUBMIT_NAME=${QMCPACK_TEST_SUBMIT_NAME}-Release
if [[ $sys == *"debug"* ]]; then
export QMCPACK_TEST_SUBMIT_NAME=${QMCPACK_TEST_SUBMIT_NAME}-Debug
CTCFG="-DCMAKE_BUILD_TYPE=Debug $CTCFG"
ctestscriptarg=debug
else
export QMCPACK_TEST_SUBMIT_NAME=${QMCPACK_TEST_SUBMIT_NAME}-Release
ctestscriptarg=release
fi
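# Illustrative only: per the logic above, a hypothetical build_gccnew_nompi job
# compiled with GCC 10.2.0 would submit as GCC10-NoMPI-Release, while the new
# build_gccnew_debug_mkl builds (any MKL infix is assumed, set elsewhere in
# this script) pick up the -Debug suffix and pass ctest_script.cmake,debug
# rather than ctest_script.cmake,release further below.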
echo $QMCPACK_TEST_SUBMIT_NAME
echo $CTCFG
if [[ $localonly == "yes" ]]; then
@@ -754,14 +830,14 @@ esac
echo --- END ctest `date`
else
echo --- START ctest `date`
echo ctest ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,release ${THETESTS}
echo ctest ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,$ctestscriptarg ${THETESTS}
#Workaround CUDA concurrency problems
case "$sys" in
*cuda*)
ctest ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,release ${THETESTS} -DN_CONCURRENT_TESTS=1
ctest ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,$ctestscriptarg ${THETESTS} -DN_CONCURRENT_TESTS=1
;;
*)
ctest -j 48 ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,release ${THETESTS}
ctest -j 48 ${CTCFG} ${GLOBALTCFG} "$CTXCFG" -DQMC_DATA=${QMC_DATA} -DENABLE_TIMERS=1 -S $PWD/../qmcpack/CMake/ctest_script.cmake,$ctestscriptarg ${THETESTS}
;;
esac
echo --- END ctest `date`

View File

@@ -168,12 +168,12 @@ git clone https://github.com/spack/spack.git
cd $HOME/apps/spack
# For reproducibility, use a specific version of Spack
# Prefer to use tagged releases https://github.com/spack/spack/releases
git checkout e15a3438a8311d1be596a949f08cf30584c57d48
#commit e15a3438a8311d1be596a949f08cf30584c57d48
#Author: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com>
#Date: Wed Feb 10 10:14:16 2021 -0800
git checkout 08054ffce73dc9aeb1dabe7a3abfdb446653cc4f
#commit 08054ffce73dc9aeb1dabe7a3abfdb446653cc4f (HEAD -> develop, origin/develop, origin/HEAD)
#Author: Andrew W Elble <aweits@rit.edu>
#Date: Mon Feb 22 14:02:10 2021 -0500
#
# ascent: add version v0.6.0 (#21573)
# py-torch: ensure libtorch_global_deps is linked with the c++ library (#21860)
echo --- Git version and last log entry
git log -1
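Advancing the pin later follows the same recipe; a minimal sketch, where <new-commit-sha> is a hypothetical placeholder:

cd $HOME/apps/spack
git fetch origin
git checkout <new-commit-sha>
git log -1    # capture the new commit details for the comment block above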
@@ -319,7 +319,7 @@ echo --- New python modules
#spack install py-numpy^blis%gcc@${gcc_vnew} # will pull in libflame (problems 2020-03-27)
spack load gcc@${gcc_vnew}
spack load python@${python_version}%gcc@${gcc_vnew}
spack install py-numpy%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew} # Will pull in OpenBLAS
#spack install py-numpy%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew} # Will pull in OpenBLAS
spack install py-scipy%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew}
#spack install py-mpi4py%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew} ^openmpi@${ompi_vnew}%gcc@${gcc_vnew} ^libxml2@${libxml2_v}%gcc@${gcc_vnew}
spack install py-setuptools%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew}
@@ -328,7 +328,7 @@ spack install py-h5py%gcc@${gcc_vnew} -mpi ^python@${python_version}%gcc@${gcc_
spack install py-pandas%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew}
spack install py-lxml%gcc@${gcc_vnew}
spack install py-matplotlib%gcc@${gcc_vnew} ^python@${python_version}%gcc@${gcc_vnew}
spack activate py-numpy%gcc@${gcc_vnew}
#spack activate py-numpy%gcc@${gcc_vnew}
spack activate py-scipy%gcc@${gcc_vnew}
spack activate py-h5py%gcc@${gcc_vnew}
spack activate py-pandas%gcc@${gcc_vnew}
@@ -341,7 +341,7 @@ echo --- Old python modules
spack load gcc@${gcc_vold}
spack load python@${python_version}%gcc@${gcc_vold}
spack install py-numpy%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold} # Will pull in OpenBLAS
#SKIP#BADspack install py-scipy%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold} # Will pull in py-pybind11 and cmake which won't bootstrap 20201223
#spack install py-scipy%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold} # Will pull in py-pybind11 and cmake which won't bootstrap 20201223
#SKIPspack install py-setuptools%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold}
#SKIP#BAD gives dupe python spack install py-mpi4py%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold} ^openmpi@${ompi_vold}%gcc@${gcc_vold} ^libxml2@${libxml2_v}%gcc@${gcc_vold}
#SKIPspack install py-mpi4py%gcc@${gcc_vold} ^openmpi@${ompi_vold}%gcc@${gcc_vold} ^py-setuptools%gcc@${gcc_vold} ^python@${python_version}%gcc@${gcc_vold}

View File

@@ -15,6 +15,9 @@ else
exit 1
fi
spack uninstall -y gcc@master
spack install gcc@master
spack uninstall -y llvm@main
spack install llvm@main +cuda cuda_arch=70 ^python@${python_version}%gcc@${gcc_vnew}
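
The uninstall-then-reinstall pattern above forces a fresh build of the moving gcc@master and llvm@main branches on each weekly run. A quick check that the rebuilt compilers are present, sketched with standard spack commands (not part of this commit):

spack find gcc@master
spack find llvm@main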

View File

@@ -23,7 +23,7 @@ hdf5_vold=1.8.19 # Released 2017-06-16
# CMake
# Dates at https://cmake.org/files/
cmake_vnew=3.19.4 # Released 2021-01-28
cmake_vnew=3.19.5 # Released 2021-02-15
cmake_vold=3.13.2 # Released 2018-12-13
# OpenMPI
