softmax gradient

Mike J Innes 2017-08-23 02:03:17 +01:00
parent 5eee653a64
commit 23c5a1b163
2 changed files with 12 additions and 1 deletion


@@ -36,6 +36,14 @@ function back!(::typeof(*), Δ, a::AbstractMatrix, b::AbstractVecOrMat)
   @back!(b, At_mul_B(data(a), Δ))
 end
 
+# NNlib
+
+import NNlib: softmax, ∇softmax
+
+softmax(xs::TrackedArray) = TrackedArray(Call(softmax, xs))
+
+back!(::typeof(softmax), Δ, xs) = @back!(xs, ∇softmax(Δ, data(xs)))
+
 # Broadcasting
 
 using ForwardDiff: Dual, partials
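For reference, the rule that `∇softmax` implements here: with y = softmax(x), the pullback of a sensitivity Δ is y .* (Δ .- sum(Δ .* y)) per column, since ∂y_j/∂x_i = y_j(δ_ij − y_i). A minimal standalone sketch of that identity with a finite-difference spot check (plain Julia, independent of Tracker; the underscored helpers are illustrative, not NNlib's code):

# Standalone sketch of the vector-Jacobian product behind ∇softmax,
# for a single vector x. Helper names are illustrative only.
softmax_(x) = (e = exp.(x .- maximum(x)); e ./ sum(e))

function ∇softmax_(Δ, x)
    y = softmax_(x)
    y .* (Δ .- sum(Δ .* y))   # y_i * (Δ_i - Σ_j Δ_j * y_j)
end

# Finite-difference spot check of the first gradient entry:
x, Δ = randn(3), randn(3)
ε = 1e-6
fd = (sum(Δ .* softmax_(x .+ [ε, 0, 0])) - sum(Δ .* softmax_(x))) / ε
isapprox(∇softmax_(Δ, x)[1], fd; atol = 1e-4)   # true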


@@ -1,5 +1,5 @@
+using Flux.Tracker, Base.Test, NNlib
 using Flux.Tracker: gradcheck
-using Base.Test, NNlib
 
 gradtest(f, xs::AbstractArray...) = gradcheck((xs...) -> sum(f(xs...)), xs...)
 gradtest(f, dims...) = gradtest(f, rand.(dims)...)
@@ -11,4 +11,7 @@ gradtest(f, dims...) = gradtest(f, rand.(dims)...)
 @test gradtest(x -> sin.(sum(x, (2, 3))), (3,4,5))
 
+@test gradtest(x -> softmax(x).*(1:3), 3)
+@test gradtest(x -> softmax(x).*(1:3), (3,5))
+
 end
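For context, `gradcheck` (imported above from Flux.Tracker) validates tracked gradients against finite differences, and `gradtest` reduces each function to a scalar via `sum` so there is a single output to differentiate. A generic central-difference checker in the same spirit (a sketch, not Flux's implementation; `fd_gradient` is a made-up name):

# Generic central-difference gradient of a scalar function f at x.
# A sketch in the spirit of gradcheck; not Flux's actual code.
function fd_gradient(f, x; ε = 1e-5)
    g = similar(x)
    for i in eachindex(x)
        xp = copy(x); xp[i] += ε
        xm = copy(x); xm[i] -= ε
        g[i] = (f(xp) - f(xm)) / (2ε)
    end
    return g
end

# e.g. the quantity the new tests exercise:
# fd_gradient(x -> sum(softmax(x) .* (1:3)), rand(3))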