backward pass shim

Mike J Innes 2016-09-28 17:15:41 +01:00
parent b662df6ce1
commit 74dc86bfbf
1 changed file with 12 additions and 1 deletion

@@ -35,6 +35,7 @@ type Model
   session::Session
   inputs::Vector{Tensor}
   graph::Tensor
+  grad::Tensor
 end
 
 Media.render(::Juno.Clipboard, ::Model) = "Flux.TF.Model()"
@@ -44,7 +45,7 @@ function tf(model)
   input = placeholder(Float64)
   g = graph(model, input)
   run(sess, initialize_all_variables())
-  Model(sess, [input], g)
+  Model(sess, [input], g, gradients(g, input))
 end
 
 function (m::Model)(args...)
@@ -52,6 +53,16 @@ function (m::Model)(args...)
   run(m.session, m.graph, Dict(zip(m.inputs, args)))
 end
 
+function Flux.back!(m::Model, Δ, args...)
+  @assert length(args) == length(m.inputs)
+  # TODO: keyword arguments to `gradients`
+  run(m.session, m.grad, Dict(zip(m.inputs, args)))
+end
+
+function Flux.update!(m::Model)
+  error("update! is not yet supported on TensorFlow models")
+end
+
 # m = Flux.Dense(784, 10)
 # t = tf(m)
 # t(randn(784,1))
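
The commented-out example at the bottom of the file hints at how the new shim is meant to be exercised. Below is a rough sketch of that workflow, not part of the commit: it assumes the `tf` constructor and `Flux.back!` behave exactly as defined in this diff, the backend module is in scope so `tf` resolves to the definition above, and the shape of Δ is a guess (the shim currently ignores it).

    # Rough usage sketch (assumptions noted in the surrounding text).
    using Flux

    m = Flux.Dense(784, 10)   # plain Flux layer, as in the commented example
    t = tf(m)                 # TensorFlow-backed Model wrapping m
    x = randn(784, 1)

    y = t(x)                  # forward pass: runs m.graph in the TF session
    Δ = ones(size(y))         # upstream gradient (shape assumed)

    # Backward pass shim: evaluates the tensor built by `gradients(g, input)`
    # at the given inputs; Δ is not yet fed in (see the TODO in the diff).
    Flux.back!(t, Δ, x)

Note that `Flux.update!` on a TensorFlow-backed model still raises an error, so the shim only evaluates gradients; it does not apply them.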