Skip to content

Commit

Permalink
cleanup unused
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed May 12, 2022
1 parent e0f2667 commit 55c658b
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 13 deletions.
6 changes: 0 additions & 6 deletions denoising_diffusion_pytorch/denoising_diffusion_pytorch.py
Original file line number Diff line number Diff line change
Expand Up @@ -388,12 +388,6 @@ def __init__(
register_buffer('posterior_mean_coef1', betas * torch.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))
register_buffer('posterior_mean_coef2', (1. - alphas_cumprod_prev) * torch.sqrt(alphas) / (1. - alphas_cumprod))

def q_mean_variance(self, x_start, t):
    """Return the mean, variance, and log-variance of q(x_t | x_0).

    Args:
        x_start: the clean data tensor x_0.
        t: integer timestep indices used to gather the schedule buffers.

    Returns:
        Tuple of (mean, variance, log_variance), each broadcast to x_start's shape.
    """
    shape = x_start.shape
    # mean scales x_0 by sqrt of the cumulative alpha product at timestep t
    mean = x_start * extract(self.sqrt_alphas_cumprod, t, shape)
    variance = extract(1. - self.alphas_cumprod, t, shape)
    log_variance = extract(self.log_one_minus_alphas_cumprod, t, shape)
    return mean, variance, log_variance

def predict_start_from_noise(self, x_t, t, noise):
return (
extract(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -
Expand Down
7 changes: 0 additions & 7 deletions denoising_diffusion_pytorch/learned_gaussian_diffusion.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,13 +88,6 @@ def q_posterior_mean_variance(self, x_start, x_t, t):
posterior_log_variance_clipped = extract(self.posterior_log_variance_clipped, t, x_t.shape)
return posterior_mean, posterior_variance, posterior_log_variance_clipped

def predict_xstart_from_xprev(self, x_t, t, xprev):
    """Recover x_0 from x_{t-1} via the posterior mean relation.

    Inverts posterior_mean = coef1 * x_0 + coef2 * x_t, i.e. computes
    (xprev - coef2 * x_t) / coef1, with the coefficients gathered at timestep t.
    """
    shape = x_t.shape
    inv_coef1 = extract(1. / self.posterior_mean_coef1, t, shape)
    coef_ratio = extract(self.posterior_mean_coef2 / self.posterior_mean_coef1, t, shape)
    return inv_coef1 * xprev - coef_ratio * x_t

def p_mean_variance(self, *, x, t, clip_denoised, model_output = None):
model_output = default(model_output, lambda: self.denoise_fn(x, t))
pred_noise, var_interp_frac_unnormalized = model_output.chunk(2, dim = 1)
Expand Down

0 comments on commit 55c658b

Please sign in to comment.