Runs `diffusers-rs` on Modal.
# Runs diffusers-rs on Modal.
import os
import subprocess
import time

import modal

CARGO_PATH: str = "/root/.cargo/bin/cargo"
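
# Stable Diffusion 2.1 weights are published as PyTorch .bin checkpoints, while
# diffusers-rs expects .ot tensor files readable by tch-rs. The two helpers below
# bridge that gap: load the .bin with torch, dump the tensors to an .npz archive
# with numpy, then convert .npz -> .ot with tch-rs's `tensor-tools` example.
# Both run at image-build time via `run_function`, so the converted weights are
# baked into the image.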
def _convert_clip():
    import numpy as np
    import torch

    # Load the CLIP text encoder checkpoint and keep only the text_model tensors as numpy arrays.
    clip_path = "/root/clip.bin"
    print(f"converting clip => {clip_path}")
    model = torch.load(clip_path)
    np.savez(
        "/root/clip_v2.1.npz",
        **{k: v.numpy() for k, v in model.items() if "text_model" in k},
    )

    # Convert the .npz archive to .ot with tch-rs's tensor-tools example.
    subprocess.run(
        f"{CARGO_PATH} run --release --example tensor-tools cp /root/clip_v2.1.npz /root/diffusers-rs/data/clip_v2.1.ot".split(),
        cwd="/root/tch-rs",  # path to the cloned tch-rs repo
        check=True,
    )
def _convert_unet_vae():
    import numpy as np
    import torch

    # Convert the VAE and UNet checkpoints to .ot via intermediate .npz archives.
    print("converting model to ot")
    for f in ["vae.bin", "unet.bin"]:
        name, _ = f.split(".")
        model = torch.load(f"./{f}")  # assumes /root is the working directory, where the weights were downloaded
        np.savez(f"/root/{name}.npz", **{k: v.numpy() for k, v in model.items()})
        subprocess.run(
            f"{CARGO_PATH} run --release --example tensor-tools cp /root/{name}.npz /root/diffusers-rs/data/{name}.ot".split(),
            cwd="/root/tch-rs",  # path to the cloned tch-rs repo
            check=True,
        )
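
# Image build: install a Rust toolchain plus torch/numpy, clone tch-rs and
# diffusers-rs, fetch the CLIP BPE vocabulary and the fp16 Stable Diffusion 2.1
# weights, convert the weights with the helpers above, and finally download the
# CUDA 11.7 libtorch that the stable-diffusion example links against.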
stub = modal.Stub("rust-diffusion")

image = (
    modal.Image.debian_slim()
    .apt_install("unzip", "git", "curl", "wget", "pkg-config", "libssl-dev", "build-essential")
    .run_commands(
        "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y",
        f"{CARGO_PATH} --version",
    )
    .pip_install("torch", "numpy")
    .run_commands(
        "git clone https://github.com/LaurentMazare/tch-rs /root/tch-rs",
        "git clone https://github.com/LaurentMazare/diffusers-rs /root/diffusers-rs",
        "mkdir /root/diffusers-rs/data/",
    )
    .run_commands(
        "wget -q -O bpe_simple_vocab_16e6.txt.gz https://github.com/openai/CLIP/raw/main/clip/bpe_simple_vocab_16e6.txt.gz",
        "gunzip bpe_simple_vocab_16e6.txt.gz",
        "mv bpe_simple_vocab_16e6.txt /root/diffusers-rs/data/bpe_simple_vocab_16e6.txt",
    )
    .run_commands(
        "wget -q -O /root/clip.bin https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/fp16/text_encoder/pytorch_model.bin",
        "wget -q -O /root/vae.bin https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/fp16/vae/diffusion_pytorch_model.bin",
        "wget -q -O /root/unet.bin https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/fp16/unet/diffusion_pytorch_model.bin",
    )
    .run_function(_convert_clip)
    .run_function(_convert_unet_vae)
    .run_commands(
        "wget -q -O libtorch.zip https://download.pytorch.org/libtorch/cu117/libtorch-cxx11-abi-shared-with-deps-1.13.1%2Bcu117.zip",
        "unzip -q libtorch.zip",
    )
)
stub.image = image
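
# Runs the diffusers-rs stable-diffusion example on an A10G GPU. LIBTORCH and
# LD_LIBRARY_PATH point cargo at the libtorch unpacked during the image build
# (assumed to have been extracted to /root/libtorch).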
@stub.function(gpu=modal.gpu.A10G())
def main():
    start = time.time()

    env = os.environ.copy()
    env["LIBTORCH"] = "/root/libtorch"
    env["LD_LIBRARY_PATH"] = "/root/libtorch/lib"

    print("running diffusers-rs")
    print(
        subprocess.run(
            f"{CARGO_PATH} run --example stable-diffusion --features clap".split(),
            cwd="/root/diffusers-rs",
            env=env,
            check=True,
        )
    )
    print(f"total time => {time.time() - start:.3f}s")
if __name__ == "__main__":
    with stub.run():
        main.call()