Fix activation functions

Elliot Saba 2017-05-25 18:12:58 -07:00
parent e37973c3d5
commit dfa4dcccb5


@@ -1,12 +1,12 @@
 export σ, relu, softmax, flatten

 # Sigmoid
 σ(x) = 1 ./ (1 + exp.(-x))
-back!(::typeof(σ), Δ, x) = Δ .* σ(x)./(1.-σ(x))
+back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))

 # Rectified Linear Unit
 relu(x) = max(0, x)
-back!(::typeof(relu), Δ, x) = Δ .* (x .< 0)
+back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)

 # TODO: correct behaviour with batches
 softmax(xs) = exp.(xs) ./ sum(exp.(xs))
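The corrected rules match the usual derivatives: dσ/dx = σ(x)(1 - σ(x)), and relu passes the incoming gradient only where the input is positive. The snippet below is not part of the commit; it is a minimal finite-difference check of those two rules, redefining σ and relu locally so it stands alone. The helper names dσ, drelu, and fd are illustrative only.

# Not part of the commit: finite-difference sketch of the corrected gradients.
σ(x) = 1 ./ (1 .+ exp.(-x))
relu(x) = max.(0, x)

# Corrected backward rules from this commit:
#   dσ/dx    = σ(x) * (1 - σ(x))
#   drelu/dx = 1 where x > 0, 0 elsewhere
dσ(x)    = σ(x) .* (1 .- σ(x))
drelu(x) = float.(x .> 0)

# Central finite differences as a reference.
fd(f, x; h = 1e-6) = (f.(x .+ h) .- f.(x .- h)) ./ (2h)

x = [-2.0, -0.5, 0.3, 1.7]
@assert isapprox(dσ(x),    fd(σ, x);    atol = 1e-6)
@assert isapprox(drelu(x), fd(relu, x); atol = 1e-6)

The softmax line still carries the TODO about batches: as written it normalizes over every element of xs, so a matrix holding several samples would be normalized jointly rather than per sample. One possible batch-aware variant, assuming one sample per column, is sketched below; the name softmax_batched and the column convention are assumptions, not the eventual Flux API.

# Also not part of the commit: one way the batch TODO could be addressed,
# normalizing each column of a matrix independently.
function softmax_batched(xs::AbstractMatrix)
    ys = exp.(xs .- maximum(xs, dims = 1))  # subtract per-column max for numerical stability
    ys ./ sum(ys, dims = 1)
end

softmax_batched(randn(3, 5))  # each of the 5 columns sums to 1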