From 703689ddd58f255e4bf5cad16e9f436834394a58 Mon Sep 17 00:00:00 2001 From: Geoff Pleiss <824157+gpleiss@users.noreply.github.com> Date: Mon, 18 Mar 2024 18:22:34 +0000 Subject: [PATCH] Improve sampling from GP predictive posteriors. In `GaussianLikelihood.marginal`, the covariance matrix is now a `PsdSumLinearOperator` rather than an `AddedDiagLinearOperator`. This change improves the samples from GP predictive posteriors. Rather than applying a low-rank approximation to `K + sigma^2 I`, the `PsdSumLinearOperator` now only applies a low-rank approximation to `K` for sampling, and then adds on i.i.d. noise. --- gpytorch/likelihoods/gaussian_likelihood.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gpytorch/likelihoods/gaussian_likelihood.py b/gpytorch/likelihoods/gaussian_likelihood.py index e753f92c3..447989f4f 100644 --- a/gpytorch/likelihoods/gaussian_likelihood.py +++ b/gpytorch/likelihoods/gaussian_likelihood.py @@ -5,7 +5,7 @@ from typing import Any, Optional, Tuple, Union import torch -from linear_operator.operators import LinearOperator, MaskedLinearOperator, ZeroLinearOperator +from linear_operator.operators import LinearOperator, MaskedLinearOperator, PsdSumLinearOperator, ZeroLinearOperator from torch import Tensor from torch.distributions import Distribution, Normal @@ -114,7 +114,7 @@ def log_marginal( def marginal(self, function_dist: MultivariateNormal, *params: Any, **kwargs: Any) -> MultivariateNormal: mean, covar = function_dist.mean, function_dist.lazy_covariance_matrix noise_covar = self._shaped_noise_covar(mean.shape, *params, **kwargs) - full_covar = covar + noise_covar + full_covar = PsdSumLinearOperator(covar, noise_covar) return function_dist.__class__(mean, full_covar)