Skip to content

Instantly share code, notes, and snippets.

@opparco
Created August 27, 2023 00:50
Show Gist options
  • Save opparco/2c80aee814745d5220adc0d19471b09a to your computer and use it in GitHub Desktop.
DDIM sampler patch for the AnimateDiff webui extension: replaces the model-provided beta schedule with a linear schedule (beta_start=0.00085, beta_end=0.012) recomputed in float32.
diff --git a/ldm/models/diffusion/ddim.py b/ldm/models/diffusion/ddim.py
index c6cfd57..2569ea1 100644
--- a/ldm/models/diffusion/ddim.py
+++ b/ldm/models/diffusion/ddim.py
@@ -25,12 +25,24 @@ class DDIMSampler(object):
self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,
num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)
alphas_cumprod = self.model.alphas_cumprod
+
+ beta_start = 0.00085
+ beta_end = 0.012
+ # beta_schedule = "linear"
+ # num_train_timesteps = 1000 # default
+
+ betas = torch.linspace(beta_start, beta_end, self.ddpm_num_timesteps, dtype=torch.float32)
+
+ alphas = 1.0 - betas
+ alphas_cumprod = torch.cumprod(alphas, dim=0)
+ alphas_cumprod_prev = torch.cat((torch.tensor([1.0], dtype=torch.float32), alphas_cumprod[:-1]))
+
assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'
to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)
- self.register_buffer('betas', to_torch(self.model.betas))
+ self.register_buffer('betas', to_torch(betas))
self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
- self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))
+ self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
# calculations for diffusion q(x_t | x_{t-1}) and others
self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment