diff --git a/src/layers/activation.jl b/src/layers/activation.jl
index e7d9cc8d..f3324aa5 100644
--- a/src/layers/activation.jl
+++ b/src/layers/activation.jl
@@ -1,12 +1,12 @@
 export σ, relu, softmax, flatten
 
+# Sigmoid
 σ(x) = 1 ./ (1 + exp.(-x))
-
-back!(::typeof(σ), Δ, x) = Δ .* σ(x)./(1.-σ(x))
+back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
 
+# Rectified Linear Unit
 relu(x) = max(0, x)
-
-back!(::typeof(relu), Δ, x) = Δ .* (x .< 0)
+back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
 
 # TODO: correct behaviour with batches
 softmax(xs) = exp.(xs) ./ sum(exp.(xs))
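
The patch corrects both backward rules: the sigmoid gradient is σ(x)*(1 - σ(x)) (a product, not a quotient), and the relu gradient passes Δ only where the input was positive (x .> 0, not x .< 0). Below is a small standalone sanity check of those derivatives against central finite differences. It is illustrative only and not part of the patch; σ, relu, dσ, drelu and numdiff here are local stand-ins defined in the snippet, not imports from the package.

    # Scalar stand-ins mirroring the patched definitions (illustrative only).
    σ(x) = 1 / (1 + exp(-x))
    relu(x) = max(0, x)

    # Analytic derivatives as implemented after the fix.
    dσ(x)    = σ(x) * (1 - σ(x))   # d/dx σ(x)
    drelu(x) = x > 0 ? 1.0 : 0.0   # d/dx relu(x), away from x = 0

    # Central finite difference for comparison.
    numdiff(f, x; h = 1e-6) = (f(x + h) - f(x - h)) / (2h)

    for x in (-2.0, -0.5, 0.5, 2.0)   # avoid x = 0, where relu is not differentiable
        @assert isapprox(dσ(x), numdiff(σ, x); atol = 1e-6)
        @assert isapprox(drelu(x), numdiff(relu, x); atol = 1e-6)
    end

With the pre-patch rules (σ(x) ./ (1 .- σ(x)) and x .< 0), the same check fails at every test point, which is the quickest way to see why the change is needed.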