Fix activation functions
This commit is contained in:
parent
e37973c3d5
commit
dfa4dcccb5
@@ -1,12 +1,12 @@
|
||||
export σ, relu, softmax, flatten
|
||||
|
||||
# Sigmoid
|
||||
# Sigmoid activation: σ(x) = 1 / (1 + e^(-x)), applied element-wise.
# `1 .+` (not `1 +`) so the scalar/array sum also broadcasts when x is an array.
σ(x) = 1 ./ (1 .+ exp.(-x))
|
||||
# Backprop through the sigmoid: dσ/dx = σ(x)·(1 − σ(x)), scaled by the
# incoming gradient Δ. Spaced broadcast operators: the original `1.-σ(x)`
# is ambiguous syntax (`1.` vs `1 .-`) and rejected by Julia ≥ 1.0.
back!(::typeof(σ), Δ, x) = Δ .* σ(x) .* (1 .- σ(x))
|
||||
|
||||
# Backprop through the sigmoid: dσ/dx = σ(x)·(1 − σ(x)), scaled by the
# incoming gradient Δ. The derivative is a PRODUCT, not a quotient —
# `σ(x)./(1.-σ(x))` was wrong (and `1.-` is ambiguous syntax in Julia ≥ 1.0).
back!(::typeof(σ), Δ, x) = Δ .* σ(x) .* (1 .- σ(x))
|
||||
|
||||
# Rectified Linear Unit
|
||||
# Rectified Linear Unit: max(0, x).
# Broadcast `max.` keeps scalar behaviour identical while also accepting
# arrays element-wise, consistent with σ and softmax above.
relu(x) = max.(0, x)
|
||||
|
||||
# Backprop through relu: the gradient passes where the unit was ACTIVE
# (x > 0) and is zero elsewhere. The original mask `x .< 0` was inverted,
# letting gradient through exactly where relu output 0.
back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
|
||||
# Backprop through relu: mask the incoming gradient Δ with the indicator
# of where the input was positive (gradient is 1 for x > 0, else 0).
back!(::typeof(relu), Δ, x) = (x .> 0) .* Δ
|
||||
|
||||
# TODO: correct behaviour with batches
|
||||
# Softmax: exponentiate and normalise into a probability distribution.
# Subtracting the maximum first is the standard stability trick — softmax is
# shift-invariant, so the result is unchanged, but exp can no longer overflow
# for large inputs. Also evaluates exp once instead of twice.
function softmax(xs)
    exps = exp.(xs .- maximum(xs))
    exps ./ sum(exps)
end
|
||||
|
Loading…
Reference in New Issue
Block a user