Flux.jl/src/core.jl

# This code is in a submodule with the intention that it will be split into an
# interface package.
module FluxCore
"""
    back!(model, ΔY, X...) => ΔX

Backpropagate the gradient `ΔY` through the model `model`, accumulating the
gradients of any parameters. Returns the gradient of the input `X`. Gradients
may be arrays or tuples of arrays (for multiple inputs/outputs).
"""
back!(model, Δ, xs...) = error("Backprop not implemented for $(typeof(model))")
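
# A minimal sketch of how a concrete model might implement `back!`. The
# `Affine` layer below is hypothetical and not part of this interface; it
# computes `Y = X*W .+ b'` and accumulates parameter gradients in `ΔW`/`Δb`.
struct Affine
  W   # weight matrix, size (in, out)
  b   # bias vector, length out
  ΔW  # accumulated gradient of W
  Δb  # accumulated gradient of b
end

function back!(m::Affine, ΔY, X)
  m.ΔW .+= X' * ΔY                  # accumulate the weight gradient
  m.Δb .+= vec(sum(ΔY, dims = 1))   # accumulate the bias gradient over the batch
  return ΔY * m.W'                  # gradient of the input X
end
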
"""
    update!(m, η) => m

Update the parameters of the model `m` with the gradients accumulated by
`back!`, using the learning rate `η`.
"""
update!(m, η) = m
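
# Continuing the hypothetical `Affine` example above: apply the accumulated
# gradients as a plain gradient-descent step, then zero them so the next
# `back!` call starts a fresh accumulation.
function update!(m::Affine, η)
  m.W .-= η .* m.ΔW
  m.b .-= η .* m.Δb
  fill!(m.ΔW, 0)
  fill!(m.Δb, 0)
  return m
end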
2016-08-23 15:32:19 +00:00
"""
    graph(model) => ::IVertex{Any} | nothing

Returns the graph representation of the model, if any. May be used for
compilation, generating symbolic gradients, etc.
"""
graph(m) = nothing
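
# Putting the interface together for one training step with the hypothetical
# `Affine` layer (the gradient `ΔY` would normally come from a loss):
#
#   m  = Affine(randn(3, 2), zeros(2), zeros(3, 2), zeros(2))
#   X  = randn(4, 3)
#   ΔX = back!(m, ones(4, 2), X)  # accumulate gradients, return input gradient
#   update!(m, 0.1)               # gradient-descent step with η = 0.1
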
end