using Juno
using Flux.Tracker: back!

# Assumed helper (needed by `runall` below): applies a function to the
# given arguments, so `foreach(call, fs)` runs each callback in `fs`.
call(f, xs...) = f(xs...)

runall(f) = f
runall(fs::AbstractVector) = () -> foreach(call, fs)
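# For example, `runall([f, g])()` calls `f()` then `g()`, while `runall(f)`
# returns `f` unchanged; this lets `opt` and `cb` in `train!` below be either
# a single function or an array of them.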
"""
    train!(loss, data, opt; cb = () -> ())

For each datapoint `d` in `data`, computes the gradient of `loss(d...)` through
backpropagation and calls the optimiser `opt` and the callback `cb`
(i.e. `opt()` and `cb()`).

Multiple optimisers and callbacks can be passed to `opt` and `cb` as arrays.
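
For example, a minimal sketch (the linear model, the random `data` and the
`SGD([W, b], 0.1)` optimiser are illustrative choices, not requirements):

```julia
using Flux

W = param(rand(2, 5))
b = param(rand(2))
loss(x, y) = sum((W*x .+ b .- y).^2)

data = [(rand(5), rand(2)) for i = 1:100]
opt = SGD([W, b], 0.1)

train!(loss, data, opt, cb = () -> println("training"))
```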
"""
function train!(loss, data, opt; cb = () -> ())
  cb = runall(cb)
  opt = runall(opt)
  # `@progress` (from Juno) shows a progress bar when running in the Juno IDE.
  @progress for d in data
    l = loss(d...)
    # Fail fast if the loss has diverged or produced an invalid value.
    isinf(l.data[]) && error("Loss is Inf")
    isnan(l.data[]) && error("Loss is NaN")
    back!(l) # backpropagate to accumulate gradients
    opt()    # apply the parameter updates
    cb()     # run the user callback(s), e.g. for logging
  end
end
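
# A sketch of the array forms mentioned in the docstring (`opt1`, `opt2` and
# the callbacks are illustrative names): every optimiser and every callback
# runs once per datapoint.
#
#   train!(loss, data, [opt1, opt2],
#          cb = [() -> println("stepped"), () -> println("logged")])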