- Using conda to install pytorch causes less trouble than using pip
Install lammps with openmpi support from conda-forge channel (or you can build your own lammps if you like)
- Install lammps from source or else your conda environment will be messed up!
Install tensorflow with pip instead of conda, as conda is slow to resolve the environment!
Install pytorch and tensorflow with conda at the same time to avoid abi incompatible issue!
- For the sake of C++ABI compatibility, use conda to install tensorflow and pip for pytorch
- Don't use full oneapi with openmpi, just use mkl or else there will be segmentation fault
- deepmd source code
- lammps source code (match the one you install via conda)
conda create -p ./2024q1 pytorch=2.1.2=cuda118*1 -c conda-forge # this way the c++abi=0
source activate ./2024q1
pip install tensorflow
bash build-deepmd.sh
# copy conda activate and deactivate file to etc/conda
#!/bin/bash
# Build deepmd-kit (Python + C++) and LAMMPS against a local conda env.
# Expects deepmd/lammps sources under ./src next to this script.
set -e
# Resolve the directory containing this script (quoted to survive spaces).
SCRIPT_PATH=$(realpath "$0")
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
deepmd_source_dir=$SCRIPT_DIR/src/deepmd-kit
lammps_source_dir=$SCRIPT_DIR/src/lammps-2Aug2023
echo "SCRIPT_DIR=$SCRIPT_DIR"
module load mkl/2021.1.1
module load anaconda/2022.5
module load cuda/11.8
# CUDA 11.8's nvcc rejects gcc 12 by default; allow the pairing explicitly.
export NVCC_APPEND_FLAGS='-allow-unsupported-compiler'
# NOTE(review): assumes the conda env prefix IS the script directory itself
# (created with `conda create -p`) — confirm against your env layout.
source activate "$SCRIPT_DIR"
which python3
module load cmake/3.21
module load gcc/12.1
module load mpi/openmpi/4.0.3-gcc
# Pin the toolchain used by pip/cmake builds below.
export CC=$(command -v gcc)
export CXX=$(command -v g++)
export FC=$(command -v gfortran)
# Enable CUDA support in the deepmd Python build.
export DP_VARIANT=cuda
# ---- install the deepmd Python module ----
pushd "$deepmd_source_dir"
# Clean the git working tree first to avoid stale-artifact build failures.
git clean -xdf
pip install .
popd
# The C++ build below needs a newer cmake than the Python build used.
module unload cmake
module load dev/cmake/3.26.3
# ---- build and install the deepmd C++ library ----
deepmd_root=$SCRIPT_DIR/opt/deepmd
echo "deepmd_root=$deepmd_root"
mkdir -p "$deepmd_root"
# Always start from a clean build tree (rm -rf does not fail on a missing dir).
rm -rf "$deepmd_source_dir/source/build"
mkdir -p "$deepmd_source_dir/source/build"
pushd "$deepmd_source_dir/source/build"
# Link against the TF/PyTorch libraries shipped inside the Python packages;
# torch's cmake config directory is discovered from the installed package.
cmake \
  -DUSE_TF_PYTHON_LIBS=TRUE \
  -DENABLE_PYTORCH=TRUE \
  -DENABLE_TENSORFLOW=TRUE \
  -DCMAKE_INSTALL_PREFIX="$deepmd_root" \
  -DUSE_CUDA_TOOLKIT=TRUE \
  -DLAMMPS_SOURCE_ROOT="$lammps_source_dir" \
  -DCMAKE_PREFIX_PATH="$(python3 -c 'import torch;print(torch.utils.cmake_prefix_path)')" \
  ..
make -j16
make install
popd
# ---- build and install LAMMPS (plugin-capable, shared libs, MPI + OpenMP) ----
# Package reference: https://docs.lammps.org/Packages_details.html
lammps_root=$SCRIPT_DIR/opt/lammps
# Always start from a clean build tree (rm -rf does not fail on a missing dir).
rm -rf "$lammps_source_dir/build"
mkdir -p "$lammps_source_dir/build"
pushd "$lammps_source_dir/build"
cmake \
  -D PKG_PLUGIN=ON \
  -D PKG_EXTRA-FIX=ON \
  -D PKG_KSPACE=ON \
  -D PKG_MISC=ON \
  -D PKG_PLUMED=ON \
  -D LAMMPS_INSTALL_RPATH=ON \
  -D BUILD_MPI=yes \
  -D BUILD_OMP=yes \
  -D BUILD_SHARED_LIBS=yes \
  -D CMAKE_INSTALL_PREFIX="$lammps_root" \
  -D CMAKE_INSTALL_LIBDIR=lib \
  -D CMAKE_INSTALL_FULL_LIBDIR="${lammps_root}/lib" \
  ../cmake
make -j16
make install
popd
# ---- conda activate/deactivate hook contents ----
# NOTE(review): these two halves appear to be the files meant to be copied
# into the env's etc/conda/activate.d and deactivate.d — confirm; as written
# inline here, the deactivate half immediately undoes the activate half.
# activate.d: load the toolchain and expose deepmd/lammps from the env prefix.
module load mpi/openmpi/4.0.3-gcc
module load cuda/11.8
module load gcc/12.1
module load mkl/2021.1.1
export LAMMPS_PLUGIN_PATH=$CONDA_PREFIX/opt/deepmd/lib/deepmd_lmp
export LD_LIBRARY_PATH=$CONDA_PREFIX/opt/deepmd/lib:$LD_LIBRARY_PATH
export PATH=$CONDA_PREFIX/opt/lammps/bin:$PATH
# deactivate.d: clear the plugin path and strip the entries added above.
export LAMMPS_PLUGIN_PATH=
export LD_LIBRARY_PATH=$(python3 -c "import os; print(':'.join(p for p in os.environ['LD_LIBRARY_PATH'].split(':') if p != '$CONDA_PREFIX/opt/deepmd/lib'))")
export PATH=$(python3 -c "import os; print(':'.join(p for p in os.environ['PATH'].split(':') if p != '$CONDA_PREFIX/opt/lammps/bin'))")
# Fix: unload the same mkl version that was loaded (was mkl/2021.1).
module unload mpi/openmpi/4.0.3-gcc gcc/12.1 cuda/11.8 mkl/2021.1.1