From dfa4dcccb55554ef0ae10ddea3aefb8a981cec7e Mon Sep 17 00:00:00 2001
From: Elliot Saba
Date: Thu, 25 May 2017 18:12:58 -0700
Subject: [PATCH] Fix activation functions

---
 src/layers/activation.jl | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/layers/activation.jl b/src/layers/activation.jl
index e7d9cc8d..f3324aa5 100644
--- a/src/layers/activation.jl
+++ b/src/layers/activation.jl
@@ -1,12 +1,12 @@
 export σ, relu, softmax, flatten
 
+# Sigmoid
 σ(x) = 1 ./ (1 + exp.(-x))
+back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
 
-back!(::typeof(σ), Δ, x) = Δ .* σ(x)./(1.-σ(x))
-
+# Rectified Linear Unit
 relu(x) = max(0, x)
-
-back!(::typeof(relu), Δ, x) = Δ .* (x .< 0)
+back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
 
 # TODO: correct behaviour with batches
 softmax(xs) = exp.(xs) ./ sum(exp.(xs))
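
Note (not part of the patch): the corrected backward rules match the analytic
derivatives: σ'(x) = σ(x)*(1 - σ(x)) for the sigmoid, and a gradient that only
passes through where x > 0 for relu. The old code divided by (1 - σ(x)) and
gated on x .< 0. Below is a minimal standalone Julia sketch checking the
corrected rules against a central finite difference; the scalar definitions and
the numgrad helper are illustrative only, not Flux API.

    # Scalar stand-ins for the patched activations (illustrative only).
    σ(x) = 1 / (1 + exp(-x))
    dσ(x) = σ(x) * (1 - σ(x))        # matches the fixed back!(::typeof(σ), ...) rule

    relu(x) = max(0, x)
    drelu(x) = x > 0 ? 1.0 : 0.0     # matches the fixed back!(::typeof(relu), ...) rule

    # Central finite difference for comparison.
    numgrad(f, x; h=1e-6) = (f(x + h) - f(x - h)) / (2h)

    for x in (-2.0, -0.5, 0.5, 2.0)  # avoid x = 0, where relu is not differentiable
        @assert isapprox(dσ(x), numgrad(σ, x); atol=1e-6)
        @assert isapprox(drelu(x), numgrad(relu, x); atol=1e-6)
    end
    println("gradient checks passed")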