text-generation-webui-docker
FROM python:3.10-slim
# Use bash as the default shell so later RUN steps can use 'source' and 'conda activate'
RUN chsh -s /bin/bash
SHELL ["/bin/bash", "-c"]
# Install packages needed to fetch Miniconda and clone repositories
RUN apt-get update && \
    apt-get install -y wget git
# Download and install Miniconda
RUN wget https://repo.anaconda.com/miniconda/Miniconda3-py310_23.1.0-1-Linux-x86_64.sh && \
    bash Miniconda3-py310_23.1.0-1-Linux-x86_64.sh -b -p /opt/conda && \
    rm Miniconda3-py310_23.1.0-1-Linux-x86_64.sh && \
    ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
    echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \
    /opt/conda/bin/conda clean -afy && \
    apt-get clean
# Set the environment variables for conda
ENV PATH="/opt/conda/bin:${PATH}"
ENV CONDA_AUTO_UPDATE_CONDA=false
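# Create the empty 'textgen' conda environment used by every later build step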
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda init bash && \
    conda create -y -n textgen"
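# Install PyTorch with CUDA 11.7 support from the pytorch and nvidia conda channels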
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    conda install -y pytorch torchvision torchaudio pytorch-cuda=11.7 git -c pytorch -c nvidia"
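# Pin text-generation-webui to a known commit and install its Python requirements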
ARG HASH=9849aac0f1284c5fa02509f1e197cc248e2c4700
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    git clone https://github.com/oobabooga/text-generation-webui && \
    cd text-generation-webui && \
    git checkout ${HASH} && \
    pip install -r requirements.txt"
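# DeepSpeed builds native extensions during pip install, so a compiler toolchain and MPI headers are installed first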
RUN apt-get install -y build-essential libopenmpi-dev && \
    /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    pip install deepspeed"
# https://github.com/oobabooga/text-generation-webui/issues/147#issuecomment-1453880733
ARG LLAMA_HASH=2845af9b7f09effe27081a0c42d59f1e66cd5c20
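# Clone Meta's reference llama repository into repositories/ at a pinned commit and install it as an editable package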
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    cd text-generation-webui && \
    mkdir -p repositories && \
    cd repositories && \
    git clone https://github.com/facebookresearch/llama && \
    cd llama && \
    git checkout ${LLAMA_HASH} && \
    pip install -r requirements.txt && \
    pip install -e ."
# https://github.com/oobabooga/text-generation-webui/issues/147#issuecomment-1454798725
ARG LLAMA_8BIT_HASH=f1c92cfcd0c35d439998906da45dc4958cd81f6f
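# Clone the llama-int8 fork (8-bit inference); the directory is renamed to llama_int8 so it is a valid Python module name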
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    cd text-generation-webui && \
    mkdir -p repositories && \
    cd repositories && \
    git clone https://github.com/tloen/llama-int8 && \
    mv llama-int8 llama_int8 && \
    cd llama_int8 && \
    git checkout ${LLAMA_8BIT_HASH} && \
    pip install -r requirements.txt"
ARG HF_HASH=d7e21f6471dec8a93c0e183c275a52c319fcff27
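# Check out a pinned transformers commit into repositories/ (fetched by hash, so git switch -d creates a detached checkout)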
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    cd text-generation-webui && \
    mkdir -p repositories && \
    cd repositories && \
    git clone https://github.com/huggingface/transformers && \
    cd transformers && \
    git fetch origin ${HF_HASH} && \
    git switch -d ${HF_HASH}"
ARG FOUR_BIT_HASH=dc3f25d50a8518cfeff7cc4fc7bfcaef79d274fa
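# Clone GPTQ-for-LLaMa at a pinned commit for 4-bit quantized inference; its CUDA kernel is compiled at container start (see CMD below)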
RUN /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    cd text-generation-webui && \
    mkdir -p repositories && \
    cd repositories && \
    git clone https://github.com/qwopqwop200/GPTQ-for-LLaMa && \
    cd GPTQ-for-LLaMa && \
    git fetch origin ${FOUR_BIT_HASH} && \
    git switch -d ${FOUR_BIT_HASH}"
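# Replace the pip-installed transformers with zphang's llama_push branch, which carries early LLaMA model support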
RUN apt-get install -y build-essential libopenmpi-dev && \
    /bin/bash -c "source /opt/conda/etc/profile.d/conda.sh && \
    conda activate textgen && \
    pip uninstall -y transformers && \
    pip install git+https://github.com/zphang/transformers@llama_push"
WORKDIR /text-generation-webui
# Run the server
ENV CLI_ARGS="--listen"
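# On start: report conda and GPU info, build the GPTQ-for-LLaMa CUDA kernel, confirm PyTorch sees CUDA, then launch the web UI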
CMD [ "/bin/bash", "-c", "source /opt/conda/etc/profile.d/conda.sh && \
    conda --version && \
    conda activate textgen && nvidia-smi -L && \
    pushd /text-generation-webui/repositories/GPTQ-for-LLaMa/ && \
    python setup_cuda.py install && \
    popd && \
    python -c 'import torch; print(\"cuda \" + torch.version.cuda + \" \" + str(torch.cuda.is_available()))' && \
    python server.py ${CLI_ARGS}"]
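# A minimal usage sketch (the image tag, host model path, and port mapping below are assumptions, not part of this gist;
# the NVIDIA Container Toolkit is required for --gpus all):
#   docker build -t text-generation-webui .
#   docker run --gpus all -p 7860:7860 \
#     -v /path/to/models:/text-generation-webui/models \
#     text-generation-webui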