ai-dock/provisioning/default.sh
#!/bin/false
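# Provisioning script for the ai-dock Stable Diffusion WebUI (AUTOMATIC1111) image.
# It installs the packages, extensions and models listed below into the container
# on first start. The /bin/false shebang keeps the file from being run directly;
# the ai-dock init process is expected to execute it during container start-up.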
DISK_GB_REQUIRED=30  # Minimum allocated disk (GB); below this only the first model of each group is downloaded
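# Conda (micromamba) and pip packages to install into the "webui" environment.
# Uncomment or add entries as needed.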
MAMBA_PACKAGES=(
#"package1"
#"package2=version"
)
PIP_PACKAGES=(
"bitsandbytes==0.41.2.post2"
)
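# WebUI extensions; each repository is cloned into
# /opt/stable-diffusion-webui/extensions (and updated on later runs unless
# AUTO_UPDATE is set to "false").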
EXTENSIONS=(
"https://github.com/Mikubill/sd-webui-controlnet"
"https://github.com/adieyal/sd-dynamic-prompts"
"https://github.com/AUTOMATIC1111/stable-diffusion-webui-wildcards"
"https://github.com/hako-mikan/sd-webui-regional-prompter"
"https://github.com/Bing-su/adetailer"
"https://github.com/pharmapsychotic/clip-interrogator"
)
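# Model downloads, grouped by type. Each group is saved to the matching
# directory under ${WORKSPACE}/storage/stable_diffusion/models/.
# Commented entries are ignored.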
CHECKPOINT_MODELS=(
#"https://civitai.com/api/download/models/358527"
)
LORA_MODELS=(
#"https://huggingface.co/datasets/AddictiveFuture/lora/resolve/main/LowRA.safetensors" # LowRA
#"https://civitai.com/api/download/models/18445" # epi_noiseoffset https://civitai.com/models/13941?modelVersionId=18445
#"https://civitai.com/api/download/models/76092" # Sharpness Tweaker https://civitai.com/models/69267/sharpness-tweaker-lora-lora
#"https://civitai.com/api/download/models/83148" # Photoblend https://civitai.com/models/78349/photoblend
#"https://civitai.com/api/download/models/126824" # Weight Slider https://civitai.com/models/112552
#"https://civitai.com/api/download/models/57468" # Shiny oiled skin 2.0 https://civitai.com/models/17455?modelVersionId=57459
#"https://civitai.com/api/download/models/206796" # epiC35mm https://civitai.com/models/184260
#"https://civitai.com/api/download/models/149806" # Fisheye Slider https://civitai.com/models/135838
)
VAE_MODELS=(
#"https://huggingface.co/stabilityai/sd-vae-ft-ema-original/resolve/main/vae-ft-ema-560000-ema-pruned.safetensors"
"https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors"
)
ESRGAN_MODELS=(
"https://huggingface.co/thataigod/UltraSharp4X/resolve/main/4x-UltraSharp.pth"
"https://huggingface.co/uwg/upscaler/resolve/main/ESRGAN/8x_NMKD-Superscale_150000_G.pth"
#"https://huggingface.co/ai-forever/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth"
#"https://huggingface.co/FacehugmanIII/4x_foolhardy_Remacri/resolve/main/4x_foolhardy_Remacri.pth"
#"https://huggingface.co/Akumetsu971/SD_Anime_Futuristic_Armor/resolve/main/4x_NMKD-Siax_200k.pth"
)
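# ControlNet weights; only useful if the sd-webui-controlnet extension above is installed.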
CONTROLNET_MODELS=(
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_canny-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_depth-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_hed-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_mlsd-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_normal-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_openpose-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_scribble-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_seg-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_canny-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_color-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_depth-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_keypose-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_openpose-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_seg-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_sketch-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_style-fp16.safetensors"
)
### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ###
function provisioning_start() {
    source /opt/ai-dock/etc/environment.sh
    # Rough disk accounting in GB (df -m reports MB)
    DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_ALLOCATED=$(($DISK_GB_AVAILABLE + $DISK_GB_USED))
    provisioning_print_header
    provisioning_get_mamba_packages
    provisioning_get_pip_packages
    provisioning_get_extensions
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \
        "${CHECKPOINT_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/lora" \
        "${LORA_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/controlnet" \
        "${CONTROLNET_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/vae" \
        "${VAE_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/esrgan" \
        "${ESRGAN_MODELS[@]}"
    PLATFORM_FLAGS=""
    if [[ $XPU_TARGET = "CPU" ]]; then
        PLATFORM_FLAGS="--use-cpu all --skip-torch-cuda-test --no-half"
    fi
    PROVISIONING_FLAGS="--skip-python-version-check --no-download-sd-model --do-not-download-clip --port 11404 --exit"
    FLAGS_COMBINED="${PLATFORM_FLAGS} $(cat /etc/a1111_webui_flags.conf) ${PROVISIONING_FLAGS}"
    # Start and exit because webui will probably require a restart
    cd /opt/stable-diffusion-webui && \
        micromamba run -n webui -e LD_PRELOAD=libtcmalloc.so python launch.py \
            ${FLAGS_COMBINED}
    provisioning_print_end
}
function provisioning_get_mamba_packages() {
    if [[ -n $MAMBA_PACKAGES ]]; then
        $MAMBA_INSTALL -n webui ${MAMBA_PACKAGES[@]}
    fi
}
function provisioning_get_pip_packages() {
    if [[ -n $PIP_PACKAGES ]]; then
        micromamba run -n webui $PIP_INSTALL ${PIP_PACKAGES[@]}
    fi
}
function provisioning_get_extensions() {
    for repo in "${EXTENSIONS[@]}"; do
        dir="${repo##*/}"
        path="/opt/stable-diffusion-webui/extensions/${dir}"
        requirements="${path}/requirements.txt"
        if [[ -d $path ]]; then
            if [[ ${AUTO_UPDATE,,} != "false" ]]; then
                printf "Updating extension: %s...\n" "${repo}"
                ( cd "$path" && git pull )
                if [[ -e $requirements ]]; then
                    micromamba -n webui run ${PIP_INSTALL} -r "$requirements"
                fi
            fi
        else
            printf "Downloading extension: %s...\n" "${repo}"
            git clone "${repo}" "${path}" --recursive
            if [[ -e $requirements ]]; then
                micromamba -n webui run ${PIP_INSTALL} -r "${requirements}"
            fi
        fi
    done
}
function provisioning_get_models() {
    # $1 is the target directory; the remaining arguments are download URLs
    if [[ -z $2 ]]; then return 1; fi
    dir="$1"
    mkdir -p "$dir"
    shift
    if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then
        arr=("$@")
    else
        printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n"
        arr=("$1")
    fi
    printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir"
    for url in "${arr[@]}"; do
        printf "Downloading: %s\n" "${url}"
        provisioning_download "${url}" "${dir}"
        printf "\n"
    done
}
function provisioning_print_header() {
    printf "\n##############################################\n#                                            #\n#           Provisioning container           #\n#                                            #\n#          This will take some time          #\n#                                            #\n# Your container will be ready on completion #\n#                                            #\n##############################################\n\n"
    if [[ $DISK_GB_ALLOCATED -lt $DISK_GB_REQUIRED ]]; then
        printf "WARNING: Your allocated disk size (%sGB) is below the recommended %sGB - Some models will not be downloaded\n" "$DISK_GB_ALLOCATED" "$DISK_GB_REQUIRED"
    fi
}
function provisioning_print_end() {
    printf "\nProvisioning complete: Web UI will start now\n\n"
}
# Download from $1 URL to $2 file path
function provisioning_download() {
    wget -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1"
}
provisioning_start
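
# Usage note (assumption; verify against the ai-dock docs for your image):
# ai-dock images typically fetch a provisioning script from the URL given in
# the PROVISIONING_SCRIPT environment variable at container start, e.g.
#   docker run ... -e PROVISIONING_SCRIPT="<raw URL of this file>" ...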