use regular +

This commit is contained in:
Mike J Innes 2016-12-15 22:57:36 +00:00
parent 5b8ca7b9db
commit 6114b70f76
5 changed files with 9 additions and 9 deletions

View File

@@ -1,6 +1,6 @@
export σ, relu, softmax, flatten
σ(x) = 1 ./ (1 .+ exp.(-x))
σ(x) = 1 ./ (1 + exp.(-x))
back!(::typeof(σ), Δ, x) = Δ .* σ(x)./(1.-σ(x))

View File

@@ -17,7 +17,7 @@ graph(::typeof(relu), x) = nn.relu(x)
graph(::typeof(σ), x) = nn.sigmoid(x)
graph(::typeof(hcat), xs...) = concat(1, xs)
graph(::typeof(seq), xs, n) = TensorFlow.unpack(xs, num = n, axis = 1)
graph(::typeof(.+), args...) = +(args...)
graph(::typeof(+), args...) = +(args...)
for op in (tanh, *, .*, +, -)
@eval graph(::typeof($op), args...) = $op(node(args)...)

View File

@@ -5,7 +5,7 @@ export Affine
@net type Affine
W
b
x -> x*W .+ b
x -> x*W + b
end
Affine(in::Integer, out::Integer; init = initn) =

View File

@@ -36,12 +36,12 @@ GatedRecurrent(in, out; init = initn) =
y; state
function (x)
# Gates
forget = σ( x * Wxf .+ y{-1} * Wyf .+ bf )
input = σ( x * Wxi .+ y{-1} * Wyi .+ bi )
output = σ( x * Wxo .+ y{-1} * Wyo .+ bo )
forget = σ( x * Wxf + y{-1} * Wyf + bf )
input = σ( x * Wxi + y{-1} * Wyi + bi )
output = σ( x * Wxo + y{-1} * Wyo + bo )
# State update and output
state = tanh( x * Wxc .+ y{-1} * Wyc .+ bc )
state = forget .* state{-1} .+ input .* state
state = tanh( x * Wxc + y{-1} * Wyc + bc )
state = forget .* state{-1} + input .* state
y = output .* tanh(state)
end
end

View File

@@ -74,7 +74,7 @@ Accumulates the update `Δ` on `p`. The value of `p` won't change until
`update!`.
"""
function accumulate!(p::Param, Δ)
p.Δx .+= Δ
p.Δx += Δ
return p
end