
Comparing changes

base repository: denizyuret/AutoGrad.jl, base: v1.1.6
head repository: denizyuret/AutoGrad.jl, compare: v1.2.0

  • 6 commits
  • 6 files changed
  • 1 contributor

Commits on Oct 11, 2019

  1. sparsebugs wip

    denizyuret committed Oct 11, 2019
    aad2f86

Commits on Oct 13, 2019

  1. lmul! ambiguity fix

    denizyuret committed Oct 13, 2019
    cefbf54
  2. tests for #114

    denizyuret committed Oct 13, 2019
    c159788

Commits on Oct 17, 2019

  1. Merge pull request #115 from denizyuret/dy/sparsebugs

    Fixing #114 Sparse bugs
    denizyuret authored Oct 17, 2019
    b930ec4

Commits on Oct 23, 2019

  1. Project.toml compat

    denizyuret committed Oct 23, 2019
    2b66ccd

Commits on Oct 25, 2019

  1. v1.2.0 news updates

    denizyuret committed Oct 25, 2019
    b6d4e68
Showing with 48 additions and 6 deletions.
  1. +7 −0 NEWS.md
  2. +4 −2 Project.toml
  3. +4 −2 src/addto.jl
  4. +7 −2 src/sparse.jl
  5. +1 −0 test/runtests.jl
  6. +25 −0 test/sparse.jl
7 changes: 7 additions & 0 deletions NEWS.md
@@ -1,5 +1,12 @@
AutoGrad v1.2.0 Release Notes
=============================

* Enhancements and bugfixes for the AutoGrad.Sparse type.


AutoGrad v1.1.6 Release Notes
=============================
1dcce22 2019-09-30

* Gradients returned can now be of type AutoGrad.Sparse to make large lookup parameters more efficient.
* Refactoring: UngetIndex -> Sparse, sum_outgrad -> addto!, Tape.list back to normal order.
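
The Sparse gradient mechanism these notes refer to looks roughly like this from the user side (a hypothetical sketch, not part of the diff; it assumes the exported Param/@diff/grad API plus the unexported Sparse and full helpers that test/sparse.jl below also uses):

using AutoGrad
using AutoGrad: Sparse, full

w = Param(randn(10000, 100))               # large lookup table
lookup(w) = sum(w[3, :]) + sum(w[7, :])    # only rows 3 and 7 are touched
J = @diff lookup(w)
g = grad(J, w)
g isa Sparse               # expected: true, only the touched entries are stored
size(full(g)) == size(w)   # full densifies when a plain array is needed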
6 changes: 4 additions & 2 deletions Project.toml
@@ -1,7 +1,7 @@
name = "AutoGrad"
uuid = "6710c13c-97f1-543f-91c5-74e8f7d95b35"
authors = ["Deniz Yuret <denizyuret@gmail.com>"]
version = "1.1.6"
version = "1.2.0"

[deps]
Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
@@ -11,7 +11,9 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"

[compat]
julia = "≥ 0.7.0"
julia = "1.0.0"
SpecialFunctions = "0.8.0"
TimerOutputs = "0.5.0"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
6 changes: 4 additions & 2 deletions src/addto.jl
@@ -37,10 +37,12 @@ matches(::AbstractDict,::AbstractDict)=true
matches(a::Tuple,b::Tuple)=(length(a)===length(b))
matches(a::AbstractArray,b::AbstractArray)=(size(a)==size(b))

-## If both accumulator and newval are sparse, merge:
+## If both accumulator and newval are sparse, merge, modifying first arg. Use + if you do not want to modify:
function addto!(a::Sparse, b::Sparse)
    @assert matches(a.container, b.container) "$(summary.((a.container, b.container)))"
-    Sparse(a.container, [ a.values; b.values ], [ a.indices; b.indices ])
+    append!(a.values, b.values)
+    append!(a.indices, b.indices)
+    return a
end

## If sparse is the accumulator, reverse:
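
With this change, addto! merges two Sparse gradients in place by appending the second argument's values and indices onto the first, while the new + defined in src/sparse.jl (next file) builds a fresh Sparse instead. A small sketch with made-up containers, values, and indices, following the constructor style of test/sparse.jl:

using AutoGrad: Sparse, full, addto!

a = Sparse(zeros(3), [1.0, 1.0], [(1,), (2,)])   # contributions at indices 1 and 2
b = Sparse(zeros(3), [2.0], [(3,)])              # contribution at index 3

c = a + b            # non-mutating: fresh Sparse with concatenated values/indices
addto!(a, b)         # mutating: appends b's entries onto a and returns a
full(a) == full(c)   # both densify to [1.0, 1.0, 2.0]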
9 changes: 7 additions & 2 deletions src/sparse.jl
@@ -47,8 +47,8 @@ end

# These are used in Knet/src/update.jl:
import LinearAlgebra: axpy!, norm, lmul!
-axpy!(a, x::Sparse, y::AbstractArray) = addto!(y, a*x)
-lmul!(a, x::Sparse{T,N}) where {T,N} = Sparse{T,N}(x.container, [ a*v for v in x.values ], x.indices)
+axpy!(a::Number, x::Sparse, y::AbstractArray) = addto!(y, a*x)
+lmul!(a::Number, x::Sparse{T,N}) where {T,N} = Sparse{T,N}(x.container, [ a*v for v in x.values ], x.indices)

# This does not give the correct result when there are repeated indices, but should be good enough for gclip
norm(x::Sparse) = sqrt(sum(abs2, norm(v) for v in x.values))
@@ -76,6 +76,11 @@ broadcasted(::typeof(/), s::Sparse, n::Number) = Sparse(s.container, [ v./n for
-(s::Sparse, a::AbstractArray) = addto!(-a, s)
-(s::Sparse) = -1*s

+# Issue #114: we may need to add multiple gradients
+function +(a::Sparse, b::Sparse)
+    @assert matches(a.container, b.container) "$(summary.((a.container, b.container)))"
+    Sparse(a.container, [ a.values; b.values ], [ a.indices; b.indices ])
+end

# Do we need these?
# sum(b::Sparse)=sum(sum(v) for v in b.values)
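
Two notes on the change above. Annotating the scalar as ::Number in axpy! and lmul! removes the method ambiguity that broke gclip (issue #114 (b), exercised in test/sparse.jl below). The caveat on norm shows up with overlapping indices, where contributions to the same position are counted independently rather than summed first; a hypothetical illustration with made-up values, following the constructor style of test/sparse.jl:

using AutoGrad: Sparse, full
using LinearAlgebra: norm

s = Sparse(zeros(3), [3.0, 4.0], [(2,), (2,)])   # two contributions to index 2
norm(s)         # sqrt(3^2 + 4^2) = 5.0: repeats are counted independently
norm(full(s))   # 7.0: the dense gradient sums overlapping contributions first
# Same order of magnitude, which is what gclip needs.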
1 change: 1 addition & 0 deletions test/runtests.jl
@@ -15,6 +15,7 @@ using Random; Random.seed!(1)
#TODO include("params.jl")
@time include("rosenbrock.jl")
#TODO include("show.jl")
@time include("sparse.jl")
@time include("specialfunctions.jl")
@time include("statistics.jl")
#TODO include("unbroadcast.jl")
25 changes: 25 additions & 0 deletions test/sparse.jl
@@ -0,0 +1,25 @@
include("header.jl")

@testset "sparse" begin

# Issue #114 (a): plus for Sparse.
using AutoGrad: Sparse, full, addto!
a = Sparse(zeros(3,4), [ [1.,1.], [1.,1.], 1., 1. ], [ ([1,2],), (3:4,), (2,2), (1,) ])
b = a + a
@test b isa Sparse
@test full(b) == full(a) + full(a)
addto!(a, a)
@test a isa Sparse
@test full(a) == full(b)
b = a + a
a .+= a
@test full(a) == full(b)

# Issue #114 (b): lmul! ambiguous for Sparse, breaks gclip.
using LinearAlgebra
foo(w) = (s = 0.0; for i=1:length(w); s+=w[i]; end; s)
w = Param(randn(2,2))
J = @diff foo(w)
@test lmul!(0.5, grad(J,w)) isa Sparse

end