Skip to content

Commit

Permalink
Switch to StableRNGs for tests (#169)
Browse files Browse the repository at this point in the history
* some cleanup
* use StableRNGs
* cca doesn't need Random
* revert to test deps in main Project.toml
* change to \approx
  • Loading branch information
kescobo authored Jan 17, 2022
1 parent c0f74de commit 446239f
Show file tree
Hide file tree
Showing 14 changed files with 103 additions and 120 deletions.
1 change: 1 addition & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ jobs:
matrix:
version:
- '1.1'
- '1.6' # current LTS release
- '1' # automatically expands to the latest stable 1.x release of Julia
# - 'nightly'
os:
Expand Down
16 changes: 0 additions & 16 deletions .travis.yml

This file was deleted.

4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ StatsBase = "0.29, 0.30, 0.31, 0.32, 0.33"
julia = "1.1"

[extras]
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test", "Random"]
test = ["Test", "StableRNGs"]
3 changes: 0 additions & 3 deletions REQUIRE

This file was deleted.

23 changes: 12 additions & 11 deletions test/cca.jl
Original file line number Diff line number Diff line change
@@ -1,24 +1,25 @@
using MultivariateStats
using LinearAlgebra
using Test
using StableRNGs
import Statistics: mean, cov
import Random


@testset "CCA" begin

Random.seed!(34568)
rng = StableRNG(34568)

dx = 5
dy = 6
p = 3

# CCA with zero means

X = rand(dx, 100)
Y = rand(dy, 100)
X = rand(rng, dx, 100)
Y = rand(rng, dy, 100)

Px = qr(randn(dx, dx)).Q[:, 1:p]
Py = qr(randn(dy, dy)).Q[:, 1:p]
Px = qr(randn(rng, dx, dx)).Q[:, 1:p]
Py = qr(randn(rng, dy, dy)).Q[:, 1:p]

M = CCA(Float64[], Float64[], Px, Py, [0.8, 0.6, 0.4])

Expand All @@ -35,8 +36,8 @@ import Random

## CCA with nonzero means

ux = randn(dx)
uy = randn(dy)
ux = randn(rng, dx)
uy = randn(rng, dy)

M = CCA(ux, uy, Px, Py, [0.8, 0.6, 0.4])

Expand All @@ -56,10 +57,10 @@ import Random

n = 1000
dg = 10
G = randn(dg, n)
G = randn(rng, dg, n)

X = randn(dx, dg) * G + 0.2 * randn(dx, n)
Y = randn(dy, dg) * G + 0.2 * randn(dy, n)
X = randn(rng, dx, dg) * G + 0.2 * randn(rng, dx, n)
Y = randn(rng, dy, dg) * G + 0.2 * randn(rng, dy, n)
xm = vec(mean(X, dims=2))
ym = vec(mean(Y, dims=2))
Zx = X .- xm
Expand Down
16 changes: 8 additions & 8 deletions test/fa.jl
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
using MultivariateStats
using LinearAlgebra
using Test
using StableRNGs
import Statistics: mean, cov, var
import Random

@testset "Factor Analysis" begin

Random.seed!(34568)
rng = StableRNG(34568)

## FA with zero mean

X = randn(5, 10)
Y = randn(3, 10)
X = randn(rng, 5, 10)
Y = randn(rng, 3, 10)

W = qr(randn(5, 5)).Q[:, 1:3]
W = qr(randn(rng, 5, 5)).Q[:, 1:3]
Ψ = fill(0.1, 5)
M = FactorAnalysis(Float64[], W, Ψ)

Expand All @@ -34,7 +34,7 @@ import Random

## PCA with non-zero mean

mval = rand(5)
mval = rand(rng, 5)
M = FactorAnalysis(mval, W, Ψ)

@test indim(M) == 5
Expand All @@ -55,11 +55,11 @@ import Random
d = 5
n = 1000

R = collect(qr(randn(d, d)).Q)
R = collect(qr(randn(rng, d, d)).Q)
@test R'R ≈ Matrix(I, 5, 5)
rmul!(R, Diagonal(sqrt.([0.5, 0.3, 0.1, 0.05, 0.05])))

X = R'randn(5, n) .+ randn(5)
X = R'randn(rng, 5, n) .+ randn(rng, 5)
mval = vec(mean(X, dims=2))
Z = X .- mval

Expand Down
12 changes: 6 additions & 6 deletions test/ica.jl
Original file line number Diff line number Diff line change
@@ -1,28 +1,28 @@
using MultivariateStats
using LinearAlgebra
using Test
using StableRNGs
import Statistics: mean, cov
import Random
import StatsBase

@testset "ICA" begin

Random.seed!(15678)
rng = StableRNG(15678)

function generatetestdata(n, k, m)
t = range(0.0, step=10.0, length=n)
s1 = sin.(t * 2)
s2 = s2 = 1.0 .- 2.0 * Bool[isodd(floor(Int, x / 3)) for x in t]
s3 = Float64[mod(x, 5.0) for x in t]

s1 += 0.1 * randn(n)
s2 += 0.1 * randn(n)
s3 += 0.1 * randn(n)
s1 += 0.1 * randn(rng, n)
s2 += 0.1 * randn(rng, n)
s3 += 0.1 * randn(rng, n)

S = hcat(s1, s2, s3)'
@assert size(S) == (k, n)

A = randn(m, k)
A = randn(rng, m, k)

X = A * S
mv = vec(mean(X, dims=2))
Expand Down
28 changes: 14 additions & 14 deletions test/kpca.jl
Original file line number Diff line number Diff line change
@@ -1,36 +1,36 @@
using MultivariateStats
using LinearAlgebra
using Test
using StableRNGs
import SparseArrays
import Statistics: mean, cov
import Random


@testset "Kernel PCA" begin

Random.seed!(34568)
rng = StableRNG(34568)

## data
n = 10
d = 5
X = randn(d, n)
X = randn(rng, d, n)

# step-by-step kernel centralization
for K in [
reshape(1.:12., 3, 4),
reshape(1.:9., 3, 3),
reshape(1.:12., 4, 3),
rand(n,d),
rand(d,d),
rand(d,n) ]
rand(rng, n,d),
rand(rng, d,d),
rand(rng, d,n) ]

x, y = size(K)
I1 = ones(x,x)/x
I2 = ones(y,y)/y
Z = K - I1*K - K*I2 + I1*K*I2

KC = fit(MultivariateStats.KernelCenter, K)
@test all(isapprox.(Z, MultivariateStats.transform!(KC, copy(K))))
@test all(Z .≈ MultivariateStats.transform!(KC, copy(K)))
end

# kernel calculations
Expand All @@ -55,7 +55,7 @@ import Random
@test all(isapprox.(MultivariateStats.transform!(KC, copy(K)), 0.0, atol=10e-7))

## check different parameters
X = randn(d, n)
X = randn(rng, d, n)
M = fit(KernelPCA, X, maxoutdim=d)
M2 = fit(PCA, X, method=:cov, pratio=1.0)
@test indim(M) == d
Expand All @@ -72,7 +72,7 @@ import Random
@test abs.(transform(M, X[:,1])) ≈ abs.(transform(M2, X[:,1]))

# issue #44
Y = randn(d, 2*n)
Y = randn(rng, d, 2*n)
@test size(transform(M, Y)) == size(transform(M2, Y))

# reconstruction
Expand All @@ -89,22 +89,22 @@ import Random

# use precomputed kernel
K = MultivariateStats.pairwise((x,y)->x'*y, eachcol(X), symmetric=true)
@test_throws AssertionError fit(KernelPCA, rand(1,10), kernel=nothing) # symmetric kernel
@test_throws AssertionError fit(KernelPCA, rand(rng, 1,10), kernel=nothing) # symmetric kernel
M = fit(KernelPCA, K, maxoutdim = 5, kernel=nothing, inverse=true) # use precomputed kernel
M2 = fit(PCA, X, method=:cov, pratio=1.0)
@test_throws ArgumentError reconstruct(M, X) # no reconstruction for precomputed kernel
@test abs.(transform(M)) ≈ abs.(transform(M2, X))

@test_throws TypeError fit(KernelPCA, rand(1,10), kernel=1)
@test_throws TypeError fit(KernelPCA, rand(rng, 1,10), kernel=1)

# different types
X = randn(Float64, d, n)
X = randn(rng, Float64, d, n)
XX = convert.(Float32, X)

M = fit(KernelPCA, X ; inverse=true)
MM = fit(KernelPCA, XX ; inverse=true)

Y = randn(Float64, outdim(M))
Y = randn(rng, Float64, outdim(M))
YY = convert.(Float32, Y)

@test indim(MM) == d
Expand All @@ -123,7 +123,7 @@ import Random
reconstruct(MM, Y)

## fit a sparse matrix
X = SparseArrays.sprandn(100d, n, 0.6)
X = SparseArrays.sprandn(rng, 100d, n, 0.6)
M = fit(KernelPCA, X, maxoutdim=3, solver=:eigs)
@test indim(M) == 100d
@test outdim(M) == 3
Expand Down
10 changes: 5 additions & 5 deletions test/lda.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
using MultivariateStats
using LinearAlgebra
using Test
using StableRNGs
import Statistics: mean, cov
import Random
using StatsBase

"""
Expand All @@ -21,7 +21,7 @@ end

@testset "LDA" begin

Random.seed!(34568)
rng = StableRNG(34568)

## LinearDiscriminant

Expand All @@ -42,7 +42,7 @@ end
@test predict(f, x) == true
@test predict(f, -x) == false

X = rand(5, 8)
X = rand(rng, 5, 8)
Y = evaluate(f, X)
@test size(Y) == (8,)
for i = 1:8
Expand All @@ -59,8 +59,8 @@ end
R2 = [cos(t2) -sin(t2); sin(t2) cos(t2)]

n = 20
Xp = Diagonal([1.2, 3.6]) * randn(2, n) .+ [1.0, -3.0]
Xn = Diagonal([2.8, 1.8]) * randn(2, n) .+ [-5.0, 2.0]
Xp = Diagonal([1.2, 3.6]) * randn(rng, 2, n) .+ [1.0, -3.0]
Xn = Diagonal([2.8, 1.8]) * randn(rng, 2, n) .+ [-5.0, 2.0]

up = vec(mean(Xp, dims=2))
un = vec(mean(Xn, dims=2))
Expand Down
16 changes: 8 additions & 8 deletions test/lreg.jl
Original file line number Diff line number Diff line change
@@ -1,25 +1,25 @@
using MultivariateStats
using Test
using LinearAlgebra
import Random
using StableRNGs

@testset "Ridge Regression" begin

Random.seed!(34568)
rng = StableRNG(34568)

## data

m = 9
n = 6
n2 = 3

X = randn(m, n)
A = randn(n, n2)
X = randn(rng, m, n)
A = randn(rng, n, n2)
Xt = X'

b = randn(1, n2)
b = randn(rng, 1, n2)

E = randn(m, n2) * 0.1
E = randn(rng, m, n2) * 0.1
Y0 = X * A + E
Y1 = X * A .+ b + E

Expand Down Expand Up @@ -112,7 +112,7 @@ import Random

## ridge (with diagonal r)

r = 0.05 .+ 0.1 .* rand(n)
r = 0.05 .+ 0.1 .* rand(rng, n)

A = ridge(X, Y0, r; dims=1, bias=false)
A_r = copy(A)
Expand Down Expand Up @@ -149,7 +149,7 @@ import Random

## ridge (with quadratic r matrix)

Q = qr(randn(n, n)).Q
Q = qr(randn(rng, n, n)).Q
r = Q' * diagm(0=>r) * Q

A = ridge(X, Y0, r; dims=1, bias=false)
Expand Down
Loading

0 comments on commit 446239f

Please sign in to comment.