this served its purpose

This commit is contained in:
Mike J Innes 2016-08-23 10:43:30 +01:00
parent 54dd0a1e0e
commit 4ad4e20c3e
1 changed file with 0 additions and 57 deletions

View File

@ -1,57 +0,0 @@
# Simple Perceptron Layer
# A single dense (fully-connected) layer: y = σ(weight*x + bias).
# Declared via the project's `@flux` macro (Julia 0.4-era `type` syntax);
# the trailing anonymous function is the layer's forward pass.
@flux type Simple
weight
bias
# Forward pass: affine transform followed by elementwise sigmoid σ.
# NOTE(review): for `weight*x + bias` to be dimensionally consistent,
# `weight` must be (ny × nx) and `bias` length ny — verify against the
# outer constructor.
x -> σ( weight*x + bias )
end
# Outer convenience constructor: build a Simple layer mapping an `nx`-vector
# input to an `ny`-vector output. `init(dims...)` generates initial
# parameters (default: standard-normal `randn`).
# Fix: the weight matrix must be (ny × nx) so that `weight*x` yields an
# ny-vector matching `bias` (length ny); the original `init(nx, ny)` was
# transposed and would throw a DimensionMismatch for nx ≠ ny.
Simple(nx::Integer, ny::Integer; init = randn) =
Simple(init(ny, nx), init(ny))
# Time Delay Node
# Intended to buffer its input and re-emit it `n` steps later; `next`
# presumably holds the delayed value/state — TODO confirm, since the
# feed/back methods below were never implemented.
type Delay
n::Int
next
end
# feed(l::Delay, x) = ...
# back(l::Delay, y) = ...
# Simple Recurrent
# Untied recurrent cell: separate parameters for the input→hidden,
# hidden→hidden, and (input, hidden)→output maps. Declared via the
# project's `@flux` macro.
@flux type RecurrentU
Wxh; Whh; Bh
Wxy; Why; By
# One step of the recurrence: update the hidden state first, then compute
# the output — note `y` uses the *updated* hidden state.
# NOTE(review): for these products to be consistent with the outer
# constructor's `init(nx, nh)` etc., the weight orientations look
# transposed — verify before reuse.
function feed(x, hidden)
hidden = σ( Wxh*x + Whh*hidden + Bh )
y = σ( Wxy*x + Why*hidden + By )
y, hidden
end
end
# Convenience constructor taking layer sizes (input nx, output ny, hidden nh).
# NOTE(review): the right-hand side calls `Recurrent` with six arrays, but
# `type Recurrent` (defined later in this file) has a single `layer` field,
# so no matching constructor exists — this almost certainly meant
# `RecurrentU(...)`. It also shares the signature `Recurrent(nx, ny, nh; ...)`
# with a second method later in the file, which would silently replace it.
Recurrent(nx, ny, nh; init = randn) =
Recurrent(init(nx, nh), init(nh, nh), init(nh),
init(nx, ny), init(nh, ny), init(ny))
# Wraps a step function `layer` with a Delay to close a feedback loop:
# each call feeds the delayed hidden state back into the layer and returns
# only the output. Declared via the project's `@flux` macro.
@flux type Looped{T}
delay::Delay
layer::T
# NOTE(review): `hidden` is read (`delay(hidden)`) before it is ever
# assigned in this scope — as plain Julia this closure would error on the
# first call unless the `@flux` macro rewrites the recurrence; verify.
function (x)
y, hidden = layer(x, delay(hidden))
return y
end
end
# Public recurrent layer: a thin wrapper around a Looped RecurrentU cell.
type Recurrent
layer::Looped{RecurrentU}
end
# Keyword constructor from layer sizes; `delay` is the loop delay length.
# NOTE(review): the right-hand side returns a `Looped`, not a `Recurrent` —
# presumably it should be wrapped as `Recurrent(Looped(...))`. Also,
# `RecurrentU(nx, ny, nh)` has no 3-argument method in this file, and this
# signature collides with the earlier `Recurrent(nx, ny, nh; ...)` method.
Recurrent(nx, ny, nh; init = randn, delay = 10) =
Looped(Delay(delay, init(nh)), RecurrentU(nx, ny, nh))
# Delegate `feed` calls on a Recurrent to its inner `layer` field.
@forward Recurrent.layer feed