Update Tests
paschermayr committed Apr 1, 2022
1 parent 46fcc57 commit 289dc63
Showing 11 changed files with 146 additions and 22 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ModelWrappers"
uuid = "44c54197-9f56-47cc-9960-7f2e20bfb0d6"
authors = ["Patrick Aschermayr <[email protected]>"]
version = "0.1.11"
version = "0.1.12"

[deps]
ArgCheck = "dce04be8-c92d-5529-be00-80e4d2c0e197"
19 changes: 14 additions & 5 deletions src/Differentiation/checks.jl
@@ -13,27 +13,34 @@ function check_gradients(
objective::Objective,
ADlibraries = [:ForwardDiff, :ReverseDiff, :Zygote],
θᵤ = randn(_rng, length(objective)),
difftune = map(backend -> DiffObjective(objective, AutomaticDiffTune(backend, objective)), ADlibraries)
difftune = map(backend -> DiffObjective(objective, AutomaticDiffTune(backend, objective)), ADlibraries);
printoutput = true
)
## Compute Gradients
ℓobjectiveresults = ℓGradientResult[]
for iter in eachindex(difftune)
push!(ℓobjectiveresults, log_density_and_gradient(difftune[iter], θᵤ))
println(ADlibraries[iter], " gradient call successful.")
if printoutput
println(ADlibraries[iter], " gradient call successful.")
end
end
## Check differences
ℓobjective_diff = map(
iter -> ℓobjectiveresults[1].ℓθᵤ - ℓobjectiveresults[iter].ℓθᵤ, eachindex(ℓobjectiveresults)
)
for iter in eachindex(ℓobjectiveresults)
println("Log objective result difference of ", ADlibraries[1], " against ", ADlibraries[iter], ": ", ℓobjective_diff[iter])
if printoutput
println("Log objective result difference of ", ADlibraries[1], " against ", ADlibraries[iter], ": ", ℓobjective_diff[iter])
end
end

ℓobjective_gradient_diff = map(
iter -> sum(abs.(ℓobjectiveresults[1].∇ℓθᵤ .- ℓobjectiveresults[iter].∇ℓθᵤ)), eachindex(ℓobjectiveresults)
)
for iter in eachindex(ℓobjective_gradient_diff)
println("Log objective gradient difference of ", ADlibraries[1], " against ", ADlibraries[iter], ": ", ℓobjective_gradient_diff[iter])
if printoutput
println("Log objective gradient difference of ", ADlibraries[1], " against ", ADlibraries[iter], ": ", ℓobjective_gradient_diff[iter])
end
end
## Compare against base Forward and ReverseDiff
grad_fd = ForwardDiff.gradient(objective, θᵤ)
@@ -42,7 +49,9 @@ function check_gradients(
(sum(abs.(result.∇ℓθᵤ .- grad_fd)), sum(abs.(result.∇ℓθᵤ .- grad_rd))), ℓobjectiveresults
)
for iter in eachindex(fdrd_diff)
println("Log objective gradient difference of ", ADlibraries[iter], " against Forward/Reverse call: ", fdrd_diff[iter])
if printoutput
println("Log objective gradient difference of ", ADlibraries[iter], " against Forward/Reverse call: ", fdrd_diff[iter])
end
end
## Return differences
return (
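For reference, a minimal sketch of calling check_gradients with the new printoutput keyword, mirroring the call added in test/test-differentiation.jl below (the names _rng and objective are assumed to already exist):

result = check_gradients(_rng, objective, [:ForwardDiff, :ReverseDiff, :Zygote]; printoutput = false)
sum(abs.(result.ℓobjective_gradient_diff))   # ≈ 0 when all backends agree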
4 changes: 4 additions & 0 deletions src/ModelWrappers.jl
@@ -46,5 +46,9 @@ include("Differentiation/Differentiation.jl")

############################################################################################
#export
export
UpdateBool,
UpdateTrue,
UpdateFalse

end
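Since UpdateBool, UpdateTrue, and UpdateFalse are now exported, call sites like the ones added in test/test-objective.jl below can drop the ModelWrappers. prefix. A hedged sketch (the names _rng and objective are assumptions, and the exact effect of the flag inside generate is assumed rather than documented here):

generate(_rng, objective, UpdateTrue())    # assumed: refresh internal state before generating
generate(_rng, objective, UpdateFalse())   # assumed: reuse the current state as-is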
2 changes: 2 additions & 0 deletions src/Models/_soss.jl
@@ -1,3 +1,4 @@
#=
############################################################################################
using Soss: Soss, ConditionalModel
import Soss: Soss, predict, simulate
@@ -101,3 +102,4 @@ end
############################################################################################
# Export
export ModelWrapper
=#
3 changes: 2 additions & 1 deletion src/Models/modelwrapper.jl
@@ -57,7 +57,7 @@ ModelWrapper(parameter::A, flattendefault::F=FlattenDefault()) where {A<:NamedTu
############################################################################################
# Basic functions for Model struct
length(model::ModelWrapper) = model.info.unflatten.unflatten.sz[end]
paramnames(model::ModelWrapper) = paramnames(model.val)#keys(model.val), model.info.unflatten.unflatten.lengths)
paramnames(model::ModelWrapper) = keys(model.val)

############################################################################################
# A bunch of functions that can be used/extended for target model in Sampling process
@@ -280,6 +280,7 @@ export
ModelWrapper,
simulate,
length,
paramnames,
fill,
fill!,
subset,
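The simplified paramnames now just forwards to the keys of the stored NamedTuple; a one-line sketch (model stands for any existing ModelWrapper):

paramnames(model)   # now identical to keys(model.val), e.g. (:μ, :σ) for a hypothetical two-field model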
1 change: 1 addition & 0 deletions src/Models/tagged.jl
@@ -37,6 +37,7 @@ end
############################################################################################
# Basic functions for Tagged struct
length(tagged::Tagged) = tagged.info.unflatten.unflatten.sz[end]
paramnames(tagged::Tagged) = keys(tagged.parameter)

#A convenient method for evaluating a prior distribution of a NamedTuple parameter
function log_prior(tagged::Tagged, θ::NamedTuple)
36 changes: 35 additions & 1 deletion test/test-bijector.jl
@@ -1,11 +1,13 @@
############################################################################################
# Bijectors
############################################################################################

@testset "Bijector - Simplex" begin
d = Distributions.Dirichlet(3,3)
simplex = [.3, .4, .3]
bij = Bijectors.SimplexBijector()
inv_bij = inverse(bij)
@test ModelWrappers._to_bijector(d) == ModelWrappers._to_bijector(bij)
@test ModelWrappers._checkparam(simplex, bij)
# Flatten
_vals, unflat = flatten(simplex, bij)
@test length(_vals) == length(simplex)-1
@@ -23,12 +25,25 @@
_vals6 = ModelWrappers.Simplex_from_flatten(_vals4)
@test all(simplex .≈ _vals5)
@test all(simplex .≈ _vals6)
# AD flatten
_vals, unflat = flatten(FlattenDefault(;unflattentype = UnflattenAD()), simplex, bij)
_vals_constrained = unflat(_vals)
@test sum(_vals_constrained) ≈ 1.0 atol = _TOL
θᵤ = randn(Float32, length(_vals))
#!NOTE: In ModelWrapper, would constrain after this step
_vals_constrained = unconstrain(inv_bij, unflat(θᵤ))
@test sum(_vals_constrained) ≈ 1.0 atol = _TOL
@test eltype(_vals_constrained) == eltype(θᵤ)
end


@testset "Bijector - PDMatrix" begin
d = Distributions.InverseWishart(10.0, [1.2 .5 ; .5 3.4])
covmat = [1.2 .5 ; .5 3.4]
bij = Bijectors.PDBijector()
inv_bij = inverse(bij)
@test ModelWrappers._to_bijector(d) == ModelWrappers._to_bijector(bij)
@test ModelWrappers._checkparam(covmat, bij)
# Flatten
_vals, unflat = flatten(covmat, bij)
@test length(_vals) == 3
@@ -45,12 +60,23 @@ end
_tag = ModelWrappers.tag(covmat, false, true)
_vals4 = ModelWrappers.flatten_Symmetric(covmat, _tag)
@test length(_vals4) == 3
# AD flatten
_vals, unflat = flatten(FlattenDefault(;unflattentype = UnflattenAD()), covmat, bij)
_vals_constrained = unflat(_vals)
@test all(_vals_constrained .≈ covmat)
θᵤ = randn(Float32, length(_vals))
#!NOTE: In ModelWrapper, would constrain after this step
_vals_constrained = unconstrain(inv_bij, unflat(θᵤ))
@test eltype(_vals_constrained) == eltype(θᵤ)
end

@testset "Bijector - CorrMatrix" begin
d = Distributions.LKJ(2, 1.0)
cormat = [1. .5 ; .5 1.]
bij = Bijectors.CorrBijector()
inv_bij = inverse(bij)
@test ModelWrappers._to_bijector(d) == ModelWrappers._to_bijector(bij)
@test ModelWrappers._checkparam(cormat, bij)
# Flatten
_vals, unflat = flatten(cormat, bij)
@test length(_vals) == 1
@@ -62,4 +88,12 @@ end
@test _valsᵤ[1, 2] != 0.0
_vals3 = unconstrain(inv_bij, _valsᵤ)
@test all(_vals3 .≈ cormat)
# AD flatten
_vals, unflat = flatten(FlattenDefault(;unflattentype = UnflattenAD()), cormat, bij)
_vals_constrained = unflat(_vals)
@test all(_vals_constrained .≈ cormat)
θᵤ = randn(Float32, length(_vals))
#!NOTE: In ModelWrapper, would constrain after this step
_vals_constrained = unconstrain(inv_bij, unflat(θᵤ))
@test eltype(_vals_constrained) == eltype(θᵤ)
end
74 changes: 62 additions & 12 deletions test/test-differentiation.jl
@@ -3,15 +3,14 @@
modelExample = ModelWrapper(ExampleModel(), _val_examplemodel)
objectiveExample = Objective(modelExample, (data1, data2, data3, _idx))

θᵤ = randn(length(objectiveExample))
tune_fwd = AutomaticDiffTune(:ForwardDiff, objectiveExample)
tune_rd = AutomaticDiffTune(:ReverseDiff, objectiveExample)
tune_zyg = AutomaticDiffTune(:Zygote, objectiveExample)
fwd = DiffObjective(objectiveExample, tune_fwd)
rd = DiffObjective(objectiveExample, tune_rd)
zyg = DiffObjective(objectiveExample, tune_zyg)

@testset "AutoDiffContainer - Log Objective AutoDiff compatibility - Vectorized Model" begin
## Assign DiffTune
tune_fwd = AutomaticDiffTune(:ForwardDiff, objectiveExample)
tune_rd = AutomaticDiffTune(:ReverseDiff, objectiveExample)
tune_zyg = AutomaticDiffTune(:Zygote, objectiveExample)
fwd = DiffObjective(objectiveExample, tune_fwd)
rd = DiffObjective(objectiveExample, tune_rd)
zyg = DiffObjective(objectiveExample, tune_zyg)
theta_unconstrained = randn(length(modelExample))
## Compute Diffresult
_grad1 = _log_density_and_gradient(objectiveExample, tune_fwd, theta_unconstrained)
@@ -38,18 +37,34 @@ zyg = DiffObjective(objectiveExample, tune_zyg)
@test sum(abs.(grad_mod_fd - grad1.∇ℓθᵤ)) ≈ 0 atol = _TOL
@test sum(abs.(grad_mod_rd - grad2.∇ℓθᵤ)) ≈ 0 atol = _TOL
@test sum(abs.(grad_mod_zy - grad3.∇ℓθᵤ)) ≈ 0 atol = _TOL
## Checks
_output = check_gradients(_RNG, objectiveExample, [:ForwardDiff, :ReverseDiff, :Zygote]; printoutput = false)
@test sum(abs.(_output.ℓobjective_gradient_diff)) ≈ 0 atol = _TOL
## Update DiffTune
ModelWrappers.update(tune_fwd, objectiveExample)
ModelWrappers.update(tune_rd, objectiveExample)
ModelWrappers.update(tune_zyg, objectiveExample)
## Config DiffTune
theta_unconstrained2 = randn(length(objectiveExample))
ModelWrappers._config(ModelWrappers.ADForward(), objectiveExample, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADReverse(), objectiveExample, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADReverseUntaped(), objectiveExample, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADZygote(), objectiveExample, theta_unconstrained2)
end

############################################################################################
# Differentiation - Lower dimensions
modelLowerDim = ModelWrapper(LowerDims(), _val_lowerdims)
objectiveLowerDim = Objective(modelLowerDim, nothing)

fwd = DiffObjective(objectiveLowerDim, AutomaticDiffTune(:ForwardDiff, objectiveLowerDim))
rd = DiffObjective(objectiveLowerDim, AutomaticDiffTune(:ReverseDiff, objectiveLowerDim))
zyg = DiffObjective(objectiveLowerDim, AutomaticDiffTune(:Zygote, objectiveLowerDim))

@testset "AutoDiffContainer - Log Objective AutoDiff compatibility - Lower dimensions" begin
## Assign DiffTune
autodiff_fd = AutomaticDiffTune(:ForwardDiff, objectiveLowerDim)
autodiff_rd = AutomaticDiffTune(:ReverseDiff, objectiveLowerDim)
autodiff_zyg = AutomaticDiffTune(:Zygote, objectiveLowerDim)
fwd = DiffObjective(objectiveLowerDim, autodiff_fd)
rd = DiffObjective(objectiveLowerDim, autodiff_rd)
zyg = DiffObjective(objectiveLowerDim, autodiff_zyg)
theta_unconstrained = randn(length(objectiveLowerDim))
## Compute Diffresult
ld1 = log_density(fwd, theta_unconstrained)
@@ -70,6 +85,23 @@ zyg = DiffObjective(objectiveLowerDim, AutomaticDiffTune(:Zygote, objectiveLower
@test sum(abs.(grad_mod_fd - grad1.∇ℓθᵤ)) ≈ 0 atol = _TOL
@test sum(abs.(grad_mod_rd - grad2.∇ℓθᵤ)) ≈ 0 atol = _TOL
@test sum(abs.(grad_mod_zy - grad3.∇ℓθᵤ)) ≈ 0 atol = _TOL
## Checks
_output = check_gradients(_RNG, objectiveLowerDim, [:ForwardDiff, :ReverseDiff, :Zygote]; printoutput = false)
@test sum(abs.(_output.ℓobjective_gradient_diff)) ≈ 0 atol = _TOL
## Results
ℓDensityResult(objectiveLowerDim, theta_unconstrained)
ℓDensityResult(objectiveLowerDim)
ℓGradientResult(grad1.θᵤ , grad1.ℓθᵤ , grad1.∇ℓθᵤ)
## Update DiffTune
ModelWrappers.update(autodiff_fd, objectiveLowerDim)
ModelWrappers.update(autodiff_rd, objectiveLowerDim)
ModelWrappers.update(autodiff_zyg, objectiveLowerDim)
## Config DiffTune
theta_unconstrained2 = randn(length(objectiveLowerDim))
ModelWrappers._config(ModelWrappers.ADForward(), objectiveLowerDim, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADReverse(), objectiveLowerDim, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADReverseUntaped(), objectiveLowerDim, theta_unconstrained2)
ModelWrappers._config(ModelWrappers.ADZygote(), objectiveLowerDim, theta_unconstrained2)
end

############################################################################################
@@ -106,3 +138,21 @@ zyg = DiffObjective(objectiveExample2, AutomaticDiffTune(:Zygote, objectiveExamp
@test grad22.ℓθᵤ isa T && eltype(grad22.θᵤ) == eltype(grad22.∇ℓθᵤ) == T
@test grad3.ℓθᵤ isa T && eltype(grad3.θᵤ) == eltype(grad3.∇ℓθᵤ) == T
end

############################################################################################
#Tune Analytic
function fun1(objective::Objective{<:ModelWrapper{M}}, θᵤ::AbstractVector{T}) where {M<:ExampleModel, T<:Real}
return zeros(size(θᵤ))
end
θᵤ = randn(length(objectiveExample))
fun1(objectiveExample, θᵤ)
@testset "AnalyticDiffTune - " begin
tune_analytic = AnalyticalDiffTune(fun1)
ModelWrappers.update(tune_analytic, objectiveExample)
_ld = _log_density(objectiveExample, tune_analytic, θᵤ)
_ldg = _log_density_and_gradient(objectiveExample, tune_analytic, θᵤ)
@test _ld == _ldg[1]
_ldgresult = log_density_and_gradient(objectiveExample, tune_analytic, θᵤ)
@test _ld == _ldgresult.ℓθᵤ
@test all(_ldgresult.θᵤ .== θᵤ)
end
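A hedged sketch of supplying a custom analytic gradient: any function with the same (Objective, θᵤ) -> Vector shape as fun1 above should plug into AnalyticalDiffTune the same way (the name analytic_grad and its stand-in gradient are hypothetical):

analytic_grad(objective::Objective, θᵤ::AbstractVector) = 2 .* θᵤ   # hypothetical stand-in gradient
tune = AnalyticalDiffTune(analytic_grad)
res = log_density_and_gradient(objectiveExample, tune, randn(length(objectiveExample)))
# res.ℓθᵤ is computed from the objective itself; res.∇ℓθᵤ is whatever analytic_grad returns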
7 changes: 6 additions & 1 deletion test/test-models.jl
@@ -1,6 +1,5 @@
############################################################################################
# Basic Functionality

_modelProb = ModelWrapper(ProbModel(), val_dist)
@testset "Models - basic functionality" begin
## Model Length accounting discrete parameter
@@ -28,6 +27,7 @@ end
## Model with transforms in lower dimensions

_modelExample = ModelWrapper(ExampleModel(), _val_examplemodel)
_tagged = Tagged(_modelExample)
@testset "Models - Model with transforms in lower dimensions" begin
## Model Length accounting discrete parameter
unconstrain(_modelExample)
@@ -48,6 +48,11 @@ _modelExample = ModelWrapper(ExampleModel(), _val_examplemodel)
## Check if densities match
@test log_prior(_modelExample) + log_abs_det_jac(_modelExample) ≈
log_prior_with_transform(_modelExample)
## Check utility functions
@test length(_modelExample) == 23
@test ModelWrappers.paramnames(_modelExample) == keys(_val_examplemodel)
fill(_modelExample, _tagged, _modelExample.val)
fill!(_modelExample, _tagged, _modelExample.val)
end

############################################################################################
9 changes: 9 additions & 0 deletions test/test-objective.jl
@@ -147,8 +147,16 @@ function (objective::Objective{<:ModelWrapper{ExampleModel}})(θ::NamedTuple)
end

@testset "Objective - Log Objective AutoDiff compatibility - Vectorized Model" begin
length(objectiveExample)
ModelWrappers.paramnames(objectiveExample)
theta_unconstrained = randn(length(modelExample))

predict(_RNG, objectiveExample)
generate(_RNG, objectiveExample)
generate(_RNG, objectiveExample, ModelWrappers.UpdateTrue())
generate(_RNG, objectiveExample, ModelWrappers.UpdateFalse())
dynamics(objectiveExample)

@test abs(
(log_prior(modelExample) + log_abs_det_jac(modelExample)) -
log_prior_with_transform(modelExample),
@@ -160,6 +168,7 @@ end

@test sum(abs.(grad_mod_fd - grad_mod_rd)) ≈ 0 atol = _TOL
@test sum(abs.(grad_mod_fd - grad_mod_zy)) ≈ 0 atol = _TOL

end

############################################################################################
11 changes: 10 additions & 1 deletion test/test-tagged.jl
@@ -11,8 +11,8 @@ _targets = [Tagged(_modelProb, _syms[iter]) for iter in eachindex(_syms)]
_params = [sample(_modelProb, _targets[iter]) for iter in eachindex(_syms)]

@testset "Tagged - Model parameter" begin
for iter in eachindex(_syms)
## Assign Sub Model
for iter in eachindex(_syms)
_sym = _syms[iter]
_target = _targets[iter]
_param = _params[iter]
@@ -33,5 +33,14 @@ _params = [sample(_modelProb, _targets[iter]) for iter in eachindex(_syms)]
_θ1, _ = flatten(theta_constrained, _model_temp.info.constraint)
_θ2, _ = flatten(theta_constrained2, _target.info.constraint)
@test sum(abs.(_θ1 - _θ2)) ≈ 0 atol = _TOL
## Utility functions
subset(_model_temp, _target)
ModelWrappers.length(_target)
ModelWrappers.paramnames(_target)
fill(_model_temp, _target, _model_temp.val)
fill!(_model_temp, _target, _model_temp.val)
_model_temp.val
sample(_RNG, _model_temp, _target)
sample!(_RNG, _model_temp, _target)
end
end

2 comments on commit 289dc63

@paschermayr
Owner Author
@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/57748

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.1.12 -m "<description of version>" 289dc639f7afcc928809ee2b72da88d1f5663375
git push origin v0.1.12
