Skip to content

Commit

Permalink
Merge branch 'master' into torfjelde/new-gibbs
Browse files Browse the repository at this point in the history
  • Loading branch information
torfjelde committed Mar 10, 2024
2 parents 0f0bfd5 + 3a315ce commit 5e047ef
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 9 deletions.
7 changes: 3 additions & 4 deletions ext/TuringOptimExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,9 @@ function StatsBase.informationmatrix(m::ModeResult; hessian_function=ForwardDiff
Setfield.@set! m.f.varinfo = DynamicPPL.invlink!!(m.f.varinfo, m.f.model)
end

# Calculate the Hessian.
# Calculate the Hessian, which is the information matrix because the negative of the log likelihood was optimized
varnames = StatsBase.coefnames(m)
H = hessian_function(m.f, m.values.array[:, 1])
info = inv(H)
info = hessian_function(m.f, m.values.array[:, 1])

# Link it back if we invlinked it.
if linked
Expand All @@ -99,7 +98,7 @@ end
StatsBase.coef(m::ModeResult) = m.values
StatsBase.coefnames(m::ModeResult) = names(m.values)[1]
StatsBase.params(m::ModeResult) = StatsBase.coefnames(m)
StatsBase.vcov(m::ModeResult) = StatsBase.informationmatrix(m)
StatsBase.vcov(m::ModeResult) = inv(StatsBase.informationmatrix(m))
StatsBase.loglikelihood(m::ModeResult) = m.lp

####################
Expand Down
4 changes: 2 additions & 2 deletions src/mcmc/hmc.jl
Original file line number Diff line number Diff line change
Expand Up @@ -131,13 +131,13 @@ function DynamicPPL.initialstep(
rng::AbstractRNG,
model::AbstractModel,
spl::Sampler{<:Hamiltonian},
vi::AbstractVarInfo;
vi_original::AbstractVarInfo;
initial_params=nothing,
nadapts=0,
kwargs...
)
# Transform the samples to unconstrained space and compute the joint log probability.
vi = link!!(vi, spl, model)
vi = DynamicPPL.link(vi_original, spl, model)

# Extract parameters.
theta = vi[spl]
Expand Down
11 changes: 11 additions & 0 deletions test/mcmc/hmc.jl
Original file line number Diff line number Diff line change
Expand Up @@ -246,4 +246,15 @@
sample(demo_warn_initial_params(), NUTS(; adtype=adbackend), 5)
end
end

@turing_testset "(partially) issue: #2095" begin
    # Regression test: sampling a model whose parameters are a vector of
    # Dirichlet draws (simplex-constrained variables stored in a Vector{TV})
    # used to fail during linking; see issue #2095.
    @model function vector_of_dirichlet(::Type{TV}=Vector{Float64}) where {TV}
        xs = Vector{TV}(undef, 2)
        xs[1] ~ Dirichlet(ones(5))
        xs[2] ~ Dirichlet(ones(5))
    end
    model = vector_of_dirichlet()
    chain = sample(model, NUTS(), 1000)
    # Each component of Dirichlet(ones(5)) has marginal mean 1/5, so the
    # grand mean over all components and samples should be close to 0.2.
    # The `≈` operator was garbled out of the original scrape; restore it and
    # use an explicit absolute tolerance, since the default isapprox rtol
    # (~sqrt(eps)) is far too tight for an MCMC sample mean.
    @test mean(Array(chain)) ≈ 0.2 atol = 0.05
end
end
11 changes: 8 additions & 3 deletions test/optimisation/OptimInterface.jl
Original file line number Diff line number Diff line change
Expand Up @@ -56,23 +56,28 @@ end
@testset "StatsBase integration" begin
Random.seed!(54321)
mle_est = optimize(gdemo_default, MLE())
# Calculated based on the two data points in gdemo_default, [1.5, 2.0]
true_values = [0.0625, 1.75]

@test coefnames(mle_est) == [:s, :m]

diffs = coef(mle_est).array - [0.0625031; 1.75001]
@test all(isapprox.(diffs, 0.0, atol=0.1))

infomat = [0.003907027690416608 4.157954948417027e-7; 4.157954948417027e-7 0.03125155528962335]
infomat = [2/(2 * true_values[1]^2) 0.0; 0.0 2/true_values[1]]
@test all(isapprox.(infomat - informationmatrix(mle_est), 0.0, atol=0.01))

vcovmat = [2*true_values[1]^2 / 2 0.0; 0.0 true_values[1]/2]
@test all(isapprox.(vcovmat - vcov(mle_est), 0.0, atol=0.01))

ctable = coeftable(mle_est)
@test ctable isa StatsBase.CoefTable

s = stderror(mle_est).array
@test all(isapprox.(s - [0.06250415643292194, 0.17677963626053916], 0.0, atol=0.01))

@test coefnames(mle_est) == Distributions.params(mle_est)
@test vcov(mle_est) == informationmatrix(mle_est)
@test vcov(mle_est) == inv(informationmatrix(mle_est))

@test isapprox(loglikelihood(mle_est), -0.0652883561466624, atol=0.01)
end
Expand All @@ -93,7 +98,7 @@ end
mle = optimize(model, MLE())

vcmat = inv(x'x)
vcmat_mle = informationmatrix(mle).array
vcmat_mle = vcov(mle).array

@test isapprox(mle.values.array, true_beta)
@test isapprox(vcmat, vcmat_mle)
Expand Down

0 comments on commit 5e047ef

Please sign in to comment.