Flux.jl/src/layers/activation.jl

12 lines
273 B
Julia
Raw Normal View History

2017-05-26 01:12:58 +00:00
# Sigmoid
2016-12-15 22:57:36 +00:00
"""
    σ(x)

Sigmoid (logistic) activation, `1 / (1 + exp(-x))`, mapping any real
number into the open interval (0, 1).

The scalar method uses `one(x)` so the result keeps the input's numeric
type. The array method applies the sigmoid elementwise, preserving the
original's behavior for array arguments (the old `1 ./ (1 + exp.(-x))`
mixed a scalar `+` with a broadcast and errors on arrays in Julia ≥ 1.0).
"""
σ(x) = one(x) / (one(x) + exp(-x))
σ(x::AbstractArray) = σ.(x)
2017-08-19 19:05:22 +00:00
# back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
2016-08-24 14:41:30 +00:00
2017-05-26 01:12:58 +00:00
# Rectified Linear Unit
2016-08-25 16:26:07 +00:00
"""
    relu(x)

Rectified linear unit: returns `x` for positive `x` and zero otherwise.

Uses `zero(x)` instead of the literal `0` so the result always has the
input's type — the old `max(0, x)` returned the Int `0` for negative
floating-point inputs, a type instability that poisons downstream
array element types.
"""
relu(x) = max(zero(x), x)
2017-08-19 19:05:22 +00:00
# back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
2016-08-25 16:26:07 +00:00
2017-06-01 18:28:02 +00:00
"""
    softmax(xs)

Softmax normalized along the second dimension (rows sum to 1), matching
the original's `sum(…, 2)` axis choice.

Fixes three defects in `exp.(xs) ./ sum(exp.(xs), 2)`:
- `sum(A, 2)` positional-dims syntax was removed in Julia 1.0; uses the
  `dims` keyword instead.
- Subtracts the row-wise maximum before exponentiating, the standard
  log-sum-exp trick, so large inputs no longer overflow to `Inf`/`NaN`.
  Softmax is shift-invariant, so the result is unchanged.
- Computes the exponentials once instead of twice.
"""
function softmax(xs)
  exps = exp.(xs .- maximum(xs, dims=2))
  return exps ./ sum(exps, dims=2)
end
2016-09-06 17:03:39 +00:00
2017-06-01 18:28:02 +00:00
"""
    flatten(xs)

Collapse every trailing dimension of `xs` into one, keeping the first
dimension intact — an N-dimensional array becomes a matrix with
`size(xs, 1)` rows.
"""
function flatten(xs)
  rows = size(xs, 1)
  return reshape(xs, rows, :)
end