diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b3cd9b9..39c1a06 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,6 +15,7 @@ jobs:
       matrix:
         version:
           - '1.1'
+          - '1.6' # current LTS release
           - '1' # automatically expands to the latest stable 1.x release of Julia
           # - 'nightly'
         os:
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 6012b2b..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: julia
-os:
-  - osx
-  - linux
-julia:
-  - 1.0
-  - 1
-  - nightly
-notifications:
-  email: false
-matrix:
-  allow_failures:
-    - julia: nightly
-sudo: false
-after_success:
-  - julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Coveralls.submit(Coveralls.process_folder())'
diff --git a/Project.toml b/Project.toml
index 26cf49d..be4977c 100644
--- a/Project.toml
+++ b/Project.toml
@@ -19,8 +19,8 @@ StatsBase = "0.29, 0.30, 0.31, 0.32, 0.33"
 julia = "1.1"

 [extras]
-Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
-test = ["Test", "Random"]
+test = ["Test", "StableRNGs"]
\ No newline at end of file
diff --git a/REQUIRE b/REQUIRE
deleted file mode 100644
index f9519e8..0000000
--- a/REQUIRE
+++ /dev/null
@@ -1,3 +0,0 @@
-julia 0.7
-StatsBase 0.29
-Arpack
diff --git a/test/cca.jl b/test/cca.jl
index e6bae8d..0d9596c 100644
--- a/test/cca.jl
+++ b/test/cca.jl
@@ -1,12 +1,13 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, cov
-import Random
+

 @testset "CCA" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     dx = 5
     dy = 6
@@ -14,11 +15,11 @@ import Random

     # CCA with zero means

-    X = rand(dx, 100)
-    Y = rand(dy, 100)
+    X = rand(rng, dx, 100)
+    Y = rand(rng, dy, 100)

-    Px = qr(randn(dx, dx)).Q[:, 1:p]
-    Py = qr(randn(dy, dy)).Q[:, 1:p]
+    Px = qr(randn(rng, dx, dx)).Q[:, 1:p]
+    Py = qr(randn(rng, dy, dy)).Q[:, 1:p]

     M = CCA(Float64[], Float64[], Px, Py, [0.8, 0.6, 0.4])

@@ -35,8 +36,8 @@ import Random

     ## CCA with nonzero means

-    ux = randn(dx)
-    uy = randn(dy)
+    ux = randn(rng, dx)
+    uy = randn(rng, dy)

     M = CCA(ux, uy, Px, Py, [0.8, 0.6, 0.4])

@@ -56,10 +57,10 @@ import Random
     n = 1000
     dg = 10
-    G = randn(dg, n)
+    G = randn(rng, dg, n)

-    X = randn(dx, dg) * G + 0.2 * randn(dx, n)
-    Y = randn(dy, dg) * G + 0.2 * randn(dy, n)
+    X = randn(rng, dx, dg) * G + 0.2 * randn(rng, dx, n)
+    Y = randn(rng, dy, dg) * G + 0.2 * randn(rng, dy, n)

     xm = vec(mean(X, dims=2))
     ym = vec(mean(Y, dims=2))
     Zx = X .- xm
diff --git a/test/fa.jl b/test/fa.jl
index cf39323..b8b6b00 100644
--- a/test/fa.jl
+++ b/test/fa.jl
@@ -1,19 +1,19 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, cov, var
-import Random

 @testset "Factor Analysis" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## FA with zero mean

-    X = randn(5, 10)
-    Y = randn(3, 10)
+    X = randn(rng, 5, 10)
+    Y = randn(rng, 3, 10)

-    W = qr(randn(5, 5)).Q[:, 1:3]
+    W = qr(randn(rng, 5, 5)).Q[:, 1:3]
     Ψ = fill(0.1, 5)

     M = FactorAnalysis(Float64[], W, Ψ)
@@ -34,7 +34,7 @@ import Random

     ## PCA with non-zero mean

-    mval = rand(5)
+    mval = rand(rng, 5)

     M = FactorAnalysis(mval, W, Ψ)
     @test indim(M) == 5
@@ -55,11 +55,11 @@ import Random
     d = 5
     n = 1000

-    R = collect(qr(randn(d, d)).Q)
+    R = collect(qr(randn(rng, d, d)).Q)
     @test R'R ≈ Matrix(I, 5, 5)
     rmul!(R, Diagonal(sqrt.([0.5, 0.3, 0.1, 0.05, 0.05])))

-    X = R'randn(5, n) .+ randn(5)
+    X = R'randn(rng, 5, n) .+ randn(rng, 5)
     mval = vec(mean(X, dims=2))
     Z = X .- mval
diff --git a/test/ica.jl b/test/ica.jl
index c52472c..91f436d 100644
--- a/test/ica.jl
+++ b/test/ica.jl
@@ -1,13 +1,13 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, cov
-import Random
 import StatsBase

 @testset "ICA" begin

-    Random.seed!(15678)
+    rng = StableRNG(15678)

     function generatetestdata(n, k, m)
         t = range(0.0, step=10.0, length=n)
@@ -15,14 +15,14 @@ import StatsBase
         s2 = s2 = 1.0 .- 2.0 * Bool[isodd(floor(Int, x / 3)) for x in t]
         s3 = Float64[mod(x, 5.0) for x in t]

-        s1 += 0.1 * randn(n)
-        s2 += 0.1 * randn(n)
-        s3 += 0.1 * randn(n)
+        s1 += 0.1 * randn(rng, n)
+        s2 += 0.1 * randn(rng, n)
+        s3 += 0.1 * randn(rng, n)

         S = hcat(s1, s2, s3)'
         @assert size(S) == (k, n)

-        A = randn(m, k)
+        A = randn(rng, m, k)
         X = A * S

         mv = vec(mean(X, dims=2))
diff --git a/test/kpca.jl b/test/kpca.jl
index fe86bd8..b3eea5b 100644
--- a/test/kpca.jl
+++ b/test/kpca.jl
@@ -1,28 +1,28 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import SparseArrays
 import Statistics: mean, cov
-import Random

 @testset "Kernel PCA" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## data
     n = 10
     d = 5

-    X = randn(d, n)
+    X = randn(rng, d, n)

     # step-by-step kernel centralization
     for K in [ reshape(1.:12., 3, 4),
                reshape(1.:9., 3, 3),
                reshape(1.:12., 4, 3),
-               rand(n,d),
-               rand(d,d),
-               rand(d,n) ]
+               rand(rng, n,d),
+               rand(rng, d,d),
+               rand(rng, d,n) ]

         x, y = size(K)
         I1 = ones(x,x)/x
@@ -30,7 +30,7 @@ import Random
         Z = K - I1*K - K*I2 + I1*K*I2

         KC = fit(MultivariateStats.KernelCenter, K)
-        @test all(isapprox.(Z, MultivariateStats.transform!(KC, copy(K))))
+        @test all(Z .≈ MultivariateStats.transform!(KC, copy(K)))
     end

     # kernel calculations
@@ -55,7 +55,7 @@ import Random
     @test all(isapprox.(MultivariateStats.transform!(KC, copy(K)), 0.0, atol=10e-7))

     ## check different parameters
-    X = randn(d, n)
+    X = randn(rng, d, n)
     M = fit(KernelPCA, X, maxoutdim=d)
     M2 = fit(PCA, X, method=:cov, pratio=1.0)
     @test indim(M) == d
@@ -72,7 +72,7 @@ import Random
     @test abs.(transform(M, X[:,1])) ≈ abs.(transform(M2, X[:,1]))

     # issue #44
-    Y = randn(d, 2*n)
+    Y = randn(rng, d, 2*n)
     @test size(transform(M, Y)) == size(transform(M2, Y))

     # reconstruction
@@ -89,22 +89,22 @@ import Random

     # use precomputed kernel
     K = MultivariateStats.pairwise((x,y)->x'*y, eachcol(X), symmetric=true)
-    @test_throws AssertionError fit(KernelPCA, rand(1,10), kernel=nothing) # symmetric kernel
+    @test_throws AssertionError fit(KernelPCA, rand(rng, 1,10), kernel=nothing) # symmetric kernel
     M = fit(KernelPCA, K, maxoutdim = 5, kernel=nothing, inverse=true) # use precomputed kernel
     M2 = fit(PCA, X, method=:cov, pratio=1.0)
     @test_throws ArgumentError reconstruct(M, X) # no reconstruction for precomputed kernel
     @test abs.(transform(M)) ≈ abs.(transform(M2, X))

-    @test_throws TypeError fit(KernelPCA, rand(1,10), kernel=1)
+    @test_throws TypeError fit(KernelPCA, rand(rng, 1,10), kernel=1)

     # different types
-    X = randn(Float64, d, n)
+    X = randn(rng, Float64, d, n)
     XX = convert.(Float32, X)

     M = fit(KernelPCA, X ; inverse=true)
     MM = fit(KernelPCA, XX ; inverse=true)

-    Y = randn(Float64, outdim(M))
+    Y = randn(rng, Float64, outdim(M))
     YY = convert.(Float32, Y)

     @test indim(MM) == d
@@ -123,7 +123,7 @@ import Random
     reconstruct(MM, Y)

     ## fit a sparse matrix
-    X = SparseArrays.sprandn(100d, n, 0.6)
+    X = SparseArrays.sprandn(rng, 100d, n, 0.6)
     M = fit(KernelPCA, X, maxoutdim=3, solver=:eigs)
     @test indim(M) == 100d
     @test outdim(M) == 3
diff --git a/test/lda.jl b/test/lda.jl
index 006ae34..1f84f0f 100644
--- a/test/lda.jl
+++ b/test/lda.jl
@@ -1,8 +1,8 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, cov
-import Random
 using StatsBase

 """
@@ -21,7 +21,7 @@ end

 @testset "LDA" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## LinearDiscriminant

@@ -42,7 +42,7 @@ end
     @test predict(f, x) == true
     @test predict(f, -x) == false

-    X = rand(5, 8)
+    X = rand(rng, 5, 8)
     Y = evaluate(f, X)
     @test size(Y) == (8,)
     for i = 1:8
@@ -59,8 +59,8 @@ end
     R2 = [cos(t2) -sin(t2); sin(t2) cos(t2)]
     n = 20

-    Xp = Diagonal([1.2, 3.6]) * randn(2, n) .+ [1.0, -3.0]
-    Xn = Diagonal([2.8, 1.8]) * randn(2, n) .+ [-5.0, 2.0]
+    Xp = Diagonal([1.2, 3.6]) * randn(rng, 2, n) .+ [1.0, -3.0]
+    Xn = Diagonal([2.8, 1.8]) * randn(rng, 2, n) .+ [-5.0, 2.0]

     up = vec(mean(Xp, dims=2))
     un = vec(mean(Xn, dims=2))
diff --git a/test/lreg.jl b/test/lreg.jl
index ae161d6..b86d89a 100644
--- a/test/lreg.jl
+++ b/test/lreg.jl
@@ -1,11 +1,11 @@
 using MultivariateStats
 using Test
 using LinearAlgebra
-import Random
+using StableRNGs

 @testset "Ridge Regression" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## data

@@ -13,13 +13,13 @@ import Random
     n = 6
     n2 = 3

-    X = randn(m, n)
-    A = randn(n, n2)
+    X = randn(rng, m, n)
+    A = randn(rng, n, n2)
     Xt = X'

-    b = randn(1, n2)
+    b = randn(rng, 1, n2)

-    E = randn(m, n2) * 0.1
+    E = randn(rng, m, n2) * 0.1
     Y0 = X * A + E
     Y1 = X * A .+ b + E

@@ -112,7 +112,7 @@ import Random

     ## ridge (with diagonal r)

-    r = 0.05 .+ 0.1 .* rand(n)
+    r = 0.05 .+ 0.1 .* rand(rng, n)

     A = ridge(X, Y0, r; dims=1, bias=false)
     A_r = copy(A)
@@ -149,7 +149,7 @@ import Random

     ## ridge (with qudratic r matrix)

-    Q = qr(randn(n, n)).Q
+    Q = qr(randn(rng, n, n)).Q
     r = Q' * diagm(0=>r) * Q

     A = ridge(X, Y0, r; dims=1, bias=false)
diff --git a/test/mclda.jl b/test/mclda.jl
index e949957..80b1a1f 100644
--- a/test/mclda.jl
+++ b/test/mclda.jl
@@ -2,8 +2,8 @@ using MultivariateStats
 using LinearAlgebra
 using Test
 using StatsBase
+using StableRNGs
 import Statistics: mean, cov
-import Random

 @testset "Multi-class LDA" begin

@@ -24,7 +24,7 @@ import Random
         end
     end

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## prepare data
     d = 5
@@ -37,10 +37,10 @@ import Random
     cmeans = zeros(d, nc)

     for k = 1:nc
-        R = qr(randn(d, d)).Q
+        R = qr(randn(rng, d, d)).Q
         nk = ns[k]

-        Xk = R * Diagonal(2 * rand(d) .+ 0.5) * randn(d, nk) .+ randn(d)
+        Xk = R * Diagonal(2 * rand(rng, d) .+ 0.5) * randn(rng, d, nk) .+ randn(rng, d)
         yk = fill(k, nk)
         uk = vec(mean(Xk, dims=2))
         Zk = Xk .- uk
@@ -143,7 +143,7 @@ import Random
     centers = [zeros(5) [10.0;zeros(4)] [0.0;10.0;zeros(3)]]

     # Case 1: 3 groups of 500
-    dX = randn(5,1500);
+    dX = randn(rng, 5,1500);
     for i = 0:500:1000
         dX[:,(1:500).+i] .= dX[:,(1:500).+i] .- mean(dX[:,(1:500).+i], dims=2) # make the mean of each 0
     end
@@ -151,7 +151,7 @@ import Random
     X1 = [dX[:,1:500].+centers[:,1] dX[:,501:1000].+centers[:,2] dX[:,1001:1500].+centers[:,3]]
     label1 = [fill(1,500); fill(2,500); fill(3,500)]
     # Case 2: 3 groups, one with 1000, one with 100, and one with 10
-    dX = randn(5,1110);
+    dX = randn(rng, 5,1110);
     dX[:, 1:1000] .= dX[:, 1:1000] .- mean(dX[:, 1:1000], dims=2)
     dX[:,1001:1100] .= dX[:,1001:1100] .- mean(dX[:,1001:1100], dims=2)
     dX[:,1101:1110] .= dX[:,1101:1110] .- mean(dX[:,1101:1110], dims=2)
@@ -168,7 +168,7 @@ import Random
     @test mean(M) ≈ totcenter
     @test classmeans(M) ≈ centers
     @test classweights(M) == w
-    x = rand(5)
+    x = rand(rng, 5)
     @test predict(M, x) ≈ projection(M)'*x
     dcenters = centers .- totcenter
     Hb = dcenters.*sqrt.(w)'
@@ -187,8 +187,8 @@ import Random
     end

     # High-dimensional case (undersampled => singularities)
-    X = randn(10^6, 9)
-    label = rand(1:3, 9); label[1:3] = 1:3
+    X = randn(rng, 10^6, 9)
+    label = rand(rng, 1:3, 9); label[1:3] = 1:3
     M = fit(SubspaceLDA, X, label)
     centers = M.cmeans
     for i = 1:3
@@ -216,7 +216,7 @@ import Random
     # Test normalized LDA
     function gen_ldadata_2(centers, n1, n2)
         d = size(centers, 1)
-        X = randn(d, n1+n2)
+        X = randn(rng, d, n1+n2)
         X[:,1:n1] .-= vec(mean(X[:,1:n1], dims=2))
         X[:,n1+1:n1+n2] .-= vec(mean(X[:,n1+1:n1+n2], dims=2))
         dX = copy(X)
diff --git a/test/pca.jl b/test/pca.jl
index 00c36f9..a8ecf41 100644
--- a/test/pca.jl
+++ b/test/pca.jl
@@ -1,26 +1,26 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, var, cov
-import Random
 import SparseArrays

 @testset "PCA" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## PCA with zero mean

-    X = randn(5, 10)
-    Y = randn(3, 10)
+    X = randn(rng, 5, 10)
+    Y = randn(rng, 3, 10)

-    P = qr(randn(5, 5)).Q[:, 1:3]
+    P = qr(randn(rng, 5, 5)).Q[:, 1:3]
     pvars = [5., 4., 3.]
-    l = [-0.809509 -1.14456 0.944145
-         -0.738713 -1.23353 -0.607874;
-         -1.64431 0.875826 -0.479549;
-         -0.816033 0.613632 1.06775 ;
-         0.655236 0.157369 0.607475]
+    l = [-0.236399 -0.706558 -1.21106
+          1.27426 -0.047799 -0.212282
+          0.990598 1.22718 -0.882179
+         -1.51861 0.725771 -0.631665
+          0.181386 -1.21069 -0.557706]

     M = PCA(Float64[], P, pvars, 15.0)
     @test size(M) == (5, 3)
@@ -32,7 +32,7 @@ import SparseArrays
     @test tprincipalvar(M) == 12.0
     @test tresidualvar(M) == 3.0
     @test principalratio(M) == 0.8
-    @test isapprox(loadings(M),l, atol = 0.001)
+    @test isapprox(loadings(M), l, atol = 0.001)

     @test predict(M, X[:,1]) ≈ P'X[:,1]
     @test predict(M, X) ≈ P'X
@@ -43,7 +43,7 @@ import SparseArrays

     ## PCA with non-zero mean

-    mval = rand(5)
+    mval = rand(rng, 5)
     M = PCA(mval, P, pvars, 15.0)

     @test size(M) == (5,3)
@@ -68,11 +68,11 @@ import SparseArrays
     d = 5
     n = 1000

-    R = collect(qr(randn(d, d)).Q)
+    R = collect(qr(randn(rng, d, d)).Q)
     @test R'R ≈ Matrix(I, 5, 5)
     rmul!(R, Diagonal(sqrt.([0.5, 0.3, 0.1, 0.05, 0.05])))

-    X = R'randn(5, n) .+ randn(5)
+    X = R'randn(rng, 5, n) .+ randn(rng, 5)
     mval = vec(mean(X, dims=2))
     Z = X .- mval
@@ -127,7 +127,7 @@ import SparseArrays
     @test isapprox(C*P, P*Diagonal(pvs), atol=1.0e-3)
     @test issorted(pvs; rev=true)
     @test isapprox(pvs, pvs0, atol=1.0e-3)
-    @test isapprox(var(M), tv, atol=1.0e-3)
+    @test isapprox(var(M), tv, atol=1.0e-2)
     @test sum(pvs) ≈ var(M)

     @test reconstruct(M, predict(M, X)) ≈ X
@@ -179,6 +179,6 @@ import SparseArrays
     M = fit(PCA, view(X, :, 1:500), pratio=0.85)

     # sparse
-    @test_throws AssertionError fit(PCA, SparseArrays.sprandn(100d, n, 0.6))
+    @test_throws AssertionError fit(PCA, SparseArrays.sprandn(rng, 100d, n, 0.6))

 end
diff --git a/test/ppca.jl b/test/ppca.jl
index 345605d..6f3a12a 100644
--- a/test/ppca.jl
+++ b/test/ppca.jl
@@ -1,21 +1,21 @@
 using MultivariateStats
 using LinearAlgebra
 using Test
+using StableRNGs
 import Statistics: mean, cov, var
-import Random
 import SparseArrays
 import StatsBase

 @testset "Probabilistic PCA" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## PCA with zero mean

-    X = randn(5, 10)
-    Y = randn(3, 10)
+    X = randn(rng, 5, 10)
+    Y = randn(rng, 3, 10)

-    W = qr(randn(5, 5)).Q[:, 1:3]
+    W = qr(randn(rng, 5, 5)).Q[:, 1:3]
     σ² = 0.1

     M = PPCA(Float64[], W, σ²)
@@ -36,7 +36,7 @@ import StatsBase

     ## PCA with non-zero mean

-    mval = rand(5)
+    mval = rand(rng, 5)
     M = PPCA(mval, W, σ²)

     @test indim(M) == 5
@@ -57,11 +57,11 @@ import StatsBase
     d = 5
     n = 1000

-    R = collect(qr(randn(d, d)).Q)
+    R = collect(qr(randn(rng, d, d)).Q)
     @test R'R ≈ Matrix(I, 5, 5)
     rmul!(R, Diagonal(sqrt.([0.5, 0.3, 0.1, 0.05, 0.05])))

-    X = R'randn(5, n) .+ randn(5)
+    X = R'randn(rng, 5, n) .+ randn(rng, 5)
     mval = vec(mean(X, dims=2))
     Z = X .- mval
@@ -150,10 +150,10 @@ import StatsBase
     # Different data types
     # --------------------

-    X = randn(Float64, 5, 10)
+    X = randn(rng, Float64, 5, 10)
     XX = convert.(Float32, X)

-    Y = randn(Float64, 1, 10)
+    Y = randn(rng, Float64, 1, 10)
     YY = convert.(Float32, Y)

     for method in (:bayes, :em)
@@ -174,11 +174,11 @@ import StatsBase
     end

     # views
-    X = randn(5, 200)
+    X = randn(rng, 5, 200)
     M = fit(PPCA, view(X, :, 1:100), maxoutdim=3)
     M = fit(PPCA, view(X, :, 1:100), maxoutdim=3, method=:em)
     M = fit(PPCA, view(X, :, 1:100), maxoutdim=3, method=:bayes)

     # sparse
-    @test_throws AssertionError fit(PPCA, SparseArrays.sprandn(100d, n, 0.6))
+    @test_throws AssertionError fit(PPCA, SparseArrays.sprandn(rng, 100d, n, 0.6))
 end
diff --git a/test/whiten.jl b/test/whiten.jl
index fb6209c..1963dd5 100644
--- a/test/whiten.jl
+++ b/test/whiten.jl
@@ -1,19 +1,19 @@
 using MultivariateStats
 using LinearAlgebra, StatsBase, SparseArrays
 using Test
+using StableRNGs
 import Statistics: mean, cov
-import Random

 @testset "Whitening" begin

-    Random.seed!(34568)
+    rng = StableRNG(34568)

     ## data

     d = 3
     n = 5

-    X = rand(d, n)
+    X = rand(rng, d, n)
     mval = vec(mean(X, dims=2))
     C = cov(X, dims=2)
     C0 = copy(C)
@@ -81,7 +81,7 @@ import Random
     @test R ≈ inv(sqrt(C))

     # mixing types
-    X = rand(Float64, 5, 10)
+    X = rand(rng, Float64, 5, 10)
     XX = convert.(Float32, X)

     M = fit(Whitening, X)
@@ -96,8 +96,8 @@ import Random
     @test eltype(mean(MM)) == Float32

     # sparse arrays
-    SX = sprand(Float32, d, n, 0.75)
-    SM = fit(Whitening, SX; mean=sprand(Float32, 3, 0.75))
+    SX = sprand(rng, Float32, d, n, 0.75)
+    SM = fit(Whitening, SX; mean=sprand(rng, Float32, 3, 0.75))
     Y = transform(SM, SX)
     @test eltype(Y) == Float32

@@ -106,12 +106,12 @@ import Random
     M1 = fit(Whitening, X'; dims=1)
     M2 = fit(Whitening, X; dims=2)
     @test M1.W == M2.W
-    @test_throws DimensionMismatch transform(M1, rand(6,4))
-    @test_throws DimensionMismatch transform(M2, rand(4,6))
+    @test_throws DimensionMismatch transform(M1, rand(rng, 6,4))
+    @test_throws DimensionMismatch transform(M2, rand(rng, 4,6))
     Y1 = transform(M1,X')
     Y2 = transform(M2,X)
    @test Y1' == Y2
-    @test_throws DimensionMismatch transform(M1, rand(7))
+    @test_throws DimensionMismatch transform(M1, rand(rng, 7))
     V1 = transform(M1,X[:,1])
     V2 = transform(M2,X[:,1])
     @test V1 == V2
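---

Note on the migration pattern used throughout this patch: the global
`Random.seed!(seed)` call is replaced everywhere by an explicit
`rng = StableRNG(seed)` that is passed as the first argument to every
`rand`/`randn`/`sprand`/`sprandn` call. StableRNGs.jl guarantees that the same
seed yields the same stream on every Julia release, whereas the default
`Random` stream may change between versions; that is evidently why the
hard-coded loadings matrix `l` in `test/pca.jl` is re-baked here, since the new
stream produces different data. A minimal sketch of the pattern (the seed and
array sizes below are illustrative, not tied to any one test file):

    using LinearAlgebra
    using StableRNGs

    rng = StableRNG(34568)              # fixed seed -> fixed stream on any Julia version
    X = randn(rng, 5, 10)               # the rng is always the first argument
    P = qr(randn(rng, 5, 5)).Q[:, 1:3]  # same drawing style the tests use

    # equal seeds reproduce identical draws, which is what makes
    # hard-coded expected values safe to keep in the tests:
    @assert randn(StableRNG(1), 3) == randn(StableRNG(1), 3)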