Flux.jl/src/layers/activation.jl

export σ, relu, softmax, flatten

# Sigmoid
σ(x) = 1 ./ (1 .+ exp.(-x))
back!(::typeof(σ), Δ, x) = Δ .* σ(x) .* (1 .- σ(x))
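# The gradient rule above uses the identity σ'(x) = σ(x) * (1 - σ(x));
# for example σ(0) = 0.5, so σ'(0) = 0.25.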

# Rectified Linear Unit
relu(x) = max.(0, x)
back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
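# Gradients pass through only where the input was strictly positive;
# the subgradient at exactly 0 is taken as 0 here.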

# TODO: correct behaviour with batches
softmax(xs) = exp.(xs) ./ sum(exp.(xs))
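# Illustrative sketch only (not exported; uses the Julia ≥ 0.7 `dims` keyword):
# a batch-aware, numerically stable variant assuming each column of a matrix is
# one sample. Subtracting the per-column maximum before exponentiating avoids
# overflow in exp.
function softmax_batched(xs::AbstractMatrix)
  e = exp.(xs .- maximum(xs, dims = 1))
  return e ./ sum(e, dims = 1)
end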

# TODO: correct behaviour with batches
flatten(xs) = reshape(xs, length(xs))
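# e.g. flatten(rand(2, 3)) returns a length-6 vector (column-major order).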

infer(::typeof(softmax), x) = x
infer(::typeof(tanh), x) = x
infer(::typeof(relu), x) = x
infer(::typeof(σ), x) = x

infer(::typeof(flatten), x::Dims) = (x[1], prod(x[2:end])...)
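# e.g. infer(flatten, (10, 28, 28)) == (10, 784): the leading dimension is kept
# and the trailing dimensions are collapsed into one.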