Flux.jl/src/backend/tensorflow/model.jl

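# TensorFlow backend: compiles a Flux model into a TensorFlow.jl graph and runs
# it inside a TF `Session`.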
type Model
  model::Any
  session::Session
  params::Dict{Flux.Param,Tensor}
  inputs::Vector{Tensor}
  output::Any
end
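# Convert `model` to a TensorFlow graph via `tograph`, initialise its variables,
# and wrap the session, parameter map, input placeholders and output in a `Model`.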
function makesession(model, inputs; session = Session(Graph()))
  params, output = tograph(model, inputs...)
  run(session, initialize_all_variables())
  Model(model, session, params, inputs, output)
end
function makesession(model, n::Integer; session = Session(Graph()))
  makesession(model, [placeholder(Float32) for _ = 1:n], session = session)
end
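# `tf(model)` compiles a model that takes a single Float32 placeholder as input.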
tf(model) = makesession(model, 1)
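# Read the current values of the TF variables back into the corresponding Flux parameters.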
function storeparams!(sess, params)
  for (p, t) in params
    p.x = run(sess, t)
  end
end
storeparams!(m::Model) = storeparams!(m.session, m.params)
ismultioutput(m::Model) = !isa(m.output, Tensor)
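# Wrap a raw array as a `Batch`, treating its leading dimension as the batch dimension.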
function batch(xs)
  dims = ndims(xs)-1
  T = Array{eltype(xs),dims}
  B = Array{eltype(xs),dims+1}
  Batch{T,B}(xs)
end
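# Feed `args` into the graph's input placeholders, run the session, and return
# the outputs wrapped as `Batch`es (a tuple of them for multi-output models).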
function runmodel(m, args...)
  @assert length(args) == length(m.inputs)
  output = run(m.session, m.output, Dict(zip(m.inputs, args)))
  ismultioutput(m) ? (batch.(output)...,) : batch(output)
end
function (m::Model)(args::Batch...)
  runmodel(m, args...)
end
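# Calling the model on unbatched data: wrap each argument with `batchone`, run
# the batched method above, then unwrap the single element of each result.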
function (m::Model)(args...)
  output = m(map(batchone, args)...)
  ismultioutput(m) ? map(first, output) : first(output)
end
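# back! and update! are not implemented for TF-backed models; training is instead
# delegated to TensorFlow's own optimisers in `train!` below.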
for f in :[back!, update!].args
  @eval function Flux.$f(m::Model, args...)
    error($(string(f)) * " is not yet supported on TensorFlow models")
  end
end
# `info` and `@progress` (used by `train!` below) are provided by Juno.
import Juno: info, @progress
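# Train with TensorFlow: build a loss node over a target placeholder `Y`, create a
# gradient-descent minimise op, and repeatedly run both over the training data,
# periodically reporting the prediction and test accuracy.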
function Flux.train!(m::Model, train, test=[]; epoch = 1, η = 0.1,
                     loss = (y, ŷ) -> reduce_sum((y - ŷ).^2)/2,
                     opt = TensorFlow.train.GradientDescentOptimizer(η))
  i = 0
  Y = placeholder(Float32)
  Loss = loss(m.output, Y)
  minimize_op = TensorFlow.train.minimize(opt, Loss)
  for e in 1:epoch
    info("Epoch $e\n")
    @progress for (x, y) in train
      y, cur_loss, _ = run(m.session, vcat(m.output, Loss, minimize_op),
                           Dict(m.inputs[1]=>batchone(x), Y=>batchone(y)))
      if i % 5000 == 0
        @show y
        @show accuracy(m, test)
      end
      i += 1
    end
  end
end
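# Usage sketch (hypothetical names; assumes a single-input Flux model `m` and an
# iterable of (x, y) pairs `data`):
#
#   tfm = tf(m)                  # compile `m` to a TF graph with one placeholder
#   tfm(rand(Float32, 10))       # run one unbatched sample through the session
#   Flux.train!(tfm, data)       # fit using TensorFlow's GradientDescentOptimizer
#   storeparams!(tfm)            # copy the trained TF variables back into `m`'s params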