# Flux.jl — src/layers/recurrent.jl
# Early recurrent-layer definitions (Recurrent, LSTM).
export Recurrent, LSTM
# Simple (Elman-style) recurrent layer: the hidden state `y` is fed back
# into the next call. `@net` is a project-defined model macro — presumably
# it turns this type into a callable layer whose anonymous `function (x)`
# is the forward pass; confirm against the `@net` definition.
@net type Recurrent
  Wxy; Wyy; by   # input→hidden weights, hidden→hidden weights, hidden bias
  y              # hidden state, carried across calls
  function (x)
    # New hidden state: affine map of input and previous state, squashed by tanh.
    y = tanh( x * Wxy + y * Wyy + by )
  end
end
# Construct a Recurrent layer mapping `in` features to `out` features.
# Weight matrices and bias are drawn from `init` (defaults to the project's
# `initn`); the hidden state starts at zero, matching the LSTM constructor.
Recurrent(in, out; init = initn) =
  Recurrent(init((in, out)), init((out, out)), init(out), zeros(Float32, out))
# LSTM layer: a gated recurrent cell with hidden output `y` and cell `state`,
# both carried across calls. `σ` is the logistic sigmoid (project-defined).
@net type LSTM
  Wxf; Wyf; bf   # forget-gate parameters
  Wxi; Wyi; bi   # input-gate parameters
  Wxo; Wyo; bo   # output-gate parameters
  Wxc; Wyc; bc   # candidate-state parameters
  y; state       # hidden output and cell state
  function (x)
    # Gates: each squashed to (0, 1) by σ.
    forget = σ( x * Wxf + y * Wyf + bf )
    input  = σ( x * Wxi + y * Wyi + bi )
    output = σ( x * Wxo + y * Wyo + bo )
    # Candidate cell state. NOTE: the original assigned this directly to
    # `state`, clobbering the previous cell state, so the forget gate ended
    # up scaling the candidate rather than the old state. Keep the candidate
    # in its own local so the standard LSTM update applies.
    candidate = tanh( x * Wxc + y * Wyc + bc )
    state = forget .* state + input .* candidate
    # Hidden output: gated, squashed cell state.
    y = output .* tanh(state)
  end
end
# Build an LSTM layer with `in` inputs and `out` outputs. Each of the four
# gates (forget, input, output, candidate) gets an input weight matrix, a
# recurrent weight matrix and a bias, all drawn from `init` (defaults to the
# project's `initn`); the two recurrent state vectors start at zero.
LSTM(in, out; init = initn) =
  LSTM(reduce(vcat, [[init((in, out)), init((out, out)), init(out)] for _ = 1:4])...,
       zeros(Float32, out), zeros(Float32, out))