Skip to content

Commit

Permalink
Reorganise tests to match KernelFunctions (#78)
Browse files Browse the repository at this point in the history
* Reorganise tests to match KernelFunctions

* Rearrange includes and remove elbo.jl

* Bump patch version
  • Loading branch information
rossviljoen authored Nov 10, 2021
1 parent 122ff43 commit 675ab8a
Show file tree
Hide file tree
Showing 10 changed files with 442 additions and 419 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "ApproximateGPs"
uuid = "298c2ebc-0411-48ad-af38-99e88101b606"
authors = ["JuliaGaussianProcesses Team"]
version = "0.2.0"
version = "0.2.1"

[deps]
AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
Expand Down
3 changes: 1 addition & 2 deletions src/ApproximateGPs.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,8 @@ export SparseVariationalApproximation
export DefaultQuadrature, Analytic, GaussHermite, MonteCarlo

include("utils.jl")
include("sparse_variational.jl")
include("expected_loglik.jl")
include("elbo.jl")
include("sparse_variational.jl")

using ForwardDiff

Expand Down
76 changes: 0 additions & 76 deletions src/elbo.jl

This file was deleted.

78 changes: 76 additions & 2 deletions src/sparse_variational.jl
Original file line number Diff line number Diff line change
Expand Up @@ -106,5 +106,79 @@ end

function inducing_points(f::ApproxPosteriorGP{<:SparseVariationalApproximation})
    # Return the pseudo-input locations `z` stored in the approximation's
    # finite GP at the inducing points, `f.approx.fz`.
    return f.approx.fz.x
end

# Cholesky factorisation of the covariance of a multivariate normal `q`.
# Generic fallback: materialise `cov(q)` and wrap it in `Symmetric` so that
# `cholesky` accepts it despite possible floating-point asymmetry.
_chol_cov(q::AbstractMvNormal) = cholesky(Symmetric(cov(q)))
# Specialisation for `MvNormal`: factorise the stored covariance directly.
# NOTE(review): this reads the internal field `q.Σ` of Distributions.jl's
# `MvNormal` — confirm this remains valid across Distributions versions.
_chol_cov(q::MvNormal) = cholesky(q.Σ)
"""
    elbo(svgp::SparseVariationalApproximation, fx::FiniteGP, y::AbstractVector{<:Real}; num_data=length(y), quadrature=DefaultQuadrature())

Compute the Evidence Lower BOund from [1] for the process `f = fx.f ==
svgp.fz.f` where `y` are observations of `fx`, pseudo-inputs are given by `z =
svgp.fz.x` and `q(u)` is a variational distribution over inducing points `u =
f(z)`.

`quadrature` selects which method is used to calculate the expected loglikelihood in
the ELBO. The options are: `DefaultQuadrature()`, `Analytic()`, `GaussHermite()` and
`MonteCarlo()`. For likelihoods with an analytic solution, `DefaultQuadrature()` uses this
exact solution. If there is no such solution, `DefaultQuadrature()` either uses
`GaussHermite()` or `MonteCarlo()`, depending on the likelihood.

N.B. the likelihood is assumed to be Gaussian with observation noise `fx.Σy`.
Further, `fx.Σy` must be isotropic - i.e. `fx.Σy = α * I`.

[1] - Hensman, James, Alexander Matthews, and Zoubin Ghahramani. "Scalable
variational Gaussian process classification." Artificial Intelligence and
Statistics. PMLR, 2015.
"""
function AbstractGPs.elbo(
    sva::SparseVariationalApproximation,
    fx::FiniteGP{<:AbstractGP,<:AbstractVector,<:Diagonal{<:Real,<:Fill}},
    y::AbstractVector{<:Real};
    num_data=length(y),
    quadrature=DefaultQuadrature(),
)
    @assert sva.fz.f === fx.f
    # The `Fill` diagonal constrains the observation noise to be isotropic,
    # so a single variance `fx.Σy[1]` fully determines the Gaussian likelihood.
    lik = GaussianLikelihood(fx.Σy[1])
    return _elbo(quadrature, sva, fx, y, lik, num_data)
end

# Fallback method hit when `fx`'s observation noise is not a
# `Diagonal{<:Real,<:Fill}` (i.e. not isotropic, unlike the method above):
# fail fast with a construction hint rather than silently computing an ELBO
# under an assumption of constant noise.
function AbstractGPs.elbo(
    ::SparseVariationalApproximation, ::FiniteGP, ::AbstractVector; kwargs...
)
    return error(
        "The observation noise fx.Σy must be homoscedastic.\n To avoid this error, construct fx using: f = GP(kernel); fx = f(x, σ²)",
    )
end

"""
    elbo(sva::SparseVariationalApproximation, lfx::LatentFiniteGP, y::AbstractVector; num_data=length(y), quadrature=DefaultQuadrature())

Compute the ELBO for a LatentGP with a possibly non-conjugate likelihood.

The expected log-likelihood term is evaluated with the method selected by
`quadrature`; `num_data` rescales a minibatch `y` to the full dataset size.
"""
function AbstractGPs.elbo(
    sva::SparseVariationalApproximation,
    lfx::LatentFiniteGP,
    y::AbstractVector;
    num_data=length(y),
    quadrature=DefaultQuadrature(),
)
    @assert sva.fz.f === lfx.fx.f
    # Delegate to the shared implementation with the latent GP's own likelihood.
    return _elbo(quadrature, sva, lfx.fx, y, lfx.lik, num_data)
end

# Compute the common elements of the ELBO
# Shared ELBO computation used by all `elbo` methods: the expected
# log-likelihood under the variational marginals, rescaled from minibatch to
# full dataset, minus the KL divergence from the prior over inducing points.
function _elbo(
    quadrature::QuadratureMethod,
    sva::SparseVariationalApproximation,
    fx::FiniteGP,
    y::AbstractVector,
    lik,
    num_data::Integer,
)
    @assert sva.fz.f === fx.f
    # KL(q(u) ‖ p(u)) between the variational distribution and the prior at z.
    kl = KL(sva.q, sva.fz)
    # Marginals of the approximate posterior at the training inputs `fx.x`.
    q_marginals = marginals(posterior(sva)(fx.x))
    exp_loglik = expected_loglik(quadrature, y, q_marginals, lik)
    # Rescale from the minibatch of size `length(y)` to `num_data` points.
    batch_scale = num_data / length(y)
    return batch_scale * sum(exp_loglik) - kl
end
3 changes: 3 additions & 0 deletions src/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,6 @@ if VERSION < v"1.7"
return Cholesky{T,typeof(U.data)}(U.data, 'U', 0)
end
end

# Cholesky factorisation of the covariance of a multivariate normal `q`.
# Generic fallback: materialise `cov(q)` and wrap it in `Symmetric` so that
# `cholesky` accepts it despite possible floating-point asymmetry.
_chol_cov(q::AbstractMvNormal) = cholesky(Symmetric(cov(q)))
# Specialisation for `MvNormal`: factorise the stored covariance directly.
# NOTE(review): this reads the internal field `q.Σ` of Distributions.jl's
# `MvNormal` — confirm this remains valid across Distributions versions.
_chol_cov(q::MvNormal) = cholesky(q.Σ)
24 changes: 0 additions & 24 deletions test/elbo.jl

This file was deleted.

88 changes: 0 additions & 88 deletions test/equivalences.jl

This file was deleted.

Loading

2 comments on commit 675ab8a

@rossviljoen
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator register()

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/48561

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.2.1 -m "<description of version>" 675ab8a7fa809286615150f41b1ad22df32156ef
git push origin v0.2.1

Please sign in to comment.