Skip to content

Instantly share code, notes, and snippets.

@dadeba
Last active February 27, 2024 10:08
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dadeba/6955b336969c2f3e2a568fe06f364232 to your computer and use it in GitHub Desktop.
axolotl with ROCm
### 1. install bnb for ROCm
### 1.1
git clone --recurse https://github.com/ROCmSoftwarePlatform/bitsandbytes
cd bitsandbytes
git checkout rocm_enabled
### 1.2
edit the Makefile as shown below, then run: ROCM_TARGET=gfx1100 make hip && python setup.py install
diff --git a/Makefile b/Makefile
index 00f5869..f4de8bc 100644
--- a/Makefile
+++ b/Makefile
@@ -114,8 +114,8 @@ cuda118: $(BUILD_DIR) env
$(GPP) -std=c++14 -DBUILD_CUDA -shared -fPIC $(INCLUDE) $(BUILD_DIR)/ops.o $(BUILD_DIR)/kernels.o $(BUILD_DIR)/link.o $(FILES_CPP) -o ./bitsandbytes/libbitsandbytes_cuda$(CUDA_VERSION).so $(LIB)
hip: $(BUILD_DIR) env
- $(HIPCC) -std=c++14 -fPIC -c $(INCLUDE_ROCM) $(LIB_ROCM) $(CSRC)/ops.hip -o $(BUILD_DIR)/ops.o
- $(HIPCC) -std=c++14 -fPIC -c $(INCLUDE_ROCM) $(LIB_ROCM) $(CSRC)/kernels.hip -o $(BUILD_DIR)/kernels.o
+ $(HIPCC) -std=c++14 -fPIC -c --offload-arch=$(ROCM_TARGET) $(INCLUDE_ROCM) $(LIB_ROCM) $(CSRC)/ops.hip -o $(BUILD_DIR)/ops.o
+ $(HIPCC) -std=c++14 -fPIC -c --offload-arch=$(ROCM_TARGET) $(INCLUDE_ROCM) $(LIB_ROCM) $(CSRC)/kernels.hip -o $(BUILD_DIR)/kernels.o
$(GPP) -std=c++14 -D__HIP_PLATFORM_AMD__ -D__HIP_PLATFORM_HCC__ -DBUILD_HIP -shared -fPIC $(INCLUDE_ROCM) $(BUILD_DIR)/ops.o $(BUILD_DIR)/kernels.o $(FILES_CPP) -o ./bitsandbytes/libbitsandbytes_hip_nohipblaslt.so $(LIB_ROCM)
cuda12x: $(BUILD_DIR) env
### 2. install Axolotl
### 2.1
git clone https://github.com/OpenAccess-AI-Collective/axolotl
edit requirements.txt and setup.py as shown below (remove xformers from the dependencies in requirements.txt)
diff --git a/requirements.txt b/requirements.txt
index 722a9c6..458a434 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,7 @@ flash-attn==2.3.3
sentencepiece
wandb
einops
-xformers==0.0.22
+#xformers==0.0.22
optimum==1.16.2
hf_transfer
colorama
diff --git a/setup.py b/setup.py
index 4a949a6..b74c32b 100644
--- a/setup.py
+++ b/setup.py
@@ -46,8 +46,9 @@ def parse_requirements():
raise ValueError("Invalid version format")
if (major, minor) >= (2, 1):
- _install_requires.pop(_install_requires.index("xformers==0.0.22"))
- _install_requires.append("xformers>=0.0.23")
+ pass
+ #_install_requires.pop(_install_requires.index("xformers==0.0.22"))
+ #_install_requires.append("xformers>=0.0.23")
except PackageNotFoundError:
pass
### 2.2
pip3 install -e '.[deepspeed]' # do not use flash-attention
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment