Flux.jl/src/core.jl

# This code is in a submodule with the intention that it will be split into an
# interface package.
module FluxCore
"""
2017-06-05 15:08:23 +00:00
back!(model, ΔY, X...) => ΔX
2016-12-15 22:31:27 +00:00
Backpropagate the gradient `ΔY` through the model `m`, accumulating the
gradients of any parameters. Returns the gradient of the input `X`. Gradients
may be arrays or tuples of arrays (for multiple inputs/outputs).
"""
2017-06-05 15:08:23 +00:00
back!(model, Δ, xs...) = error("Backprop not implemented for $(typeof(m))")
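
# --- Illustrative sketch (assumption, not part of the interface) ---
# A hypothetical `Affine` layer showing one way `back!` might be implemented.
# The field layout and gradient bookkeeping here are made up for illustration;
# real Flux layers are defined elsewhere.
struct Affine
  W   # weight matrix, (in × out)
  b   # bias vector, length out
  ΔW  # accumulated weight gradient, same shape as `W`
  Δb  # accumulated bias gradient, same shape as `b`
end

function back!(m::Affine, ΔY, X)
  m.ΔW .+= X' * ΔY                # accumulate parameter gradients in place
  m.Δb .+= vec(sum(ΔY, dims = 1))
  return ΔY * m.W'                # gradient with respect to the input `X`
end
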
"""
2017-06-05 15:08:23 +00:00
update!(model, η) => m
2016-12-15 22:31:27 +00:00
Update the parameters of the model `m` using the accumulated gradients from
`back!`, using the learning rate `η`.
"""
update!(m, η) = m
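
# Illustrative sketch continued (assumption): `update!` for the hypothetical
# `Affine` layer above, applying plain gradient descent and then clearing the
# accumulated gradients.
function update!(m::Affine, η)
  m.W .-= η .* m.ΔW
  m.b .-= η .* m.Δb
  fill!(m.ΔW, 0)
  fill!(m.Δb, 0)
  return m
end
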
"""
2017-06-05 15:08:23 +00:00
graph(model) => ::IVertex{Any} | nothing
2016-12-15 22:31:27 +00:00
Returns the graph representation of the model, if any. Most models are built
from lower-level components and can simply implement this method to get most of
Flux's functionality. If this method isn't available, functionality like
backpropagation or conversion for backend must be implemented on a case-by-case
basis. Alternatively, one can implement this method and override individual
methods as necessary.
"""
graph(m) = nothing
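
# Illustrative usage (assumption), tying the sketches above together. The
# hypothetical `Affine` layer defines no `graph`, so it falls back to `nothing`
# and relies on its hand-written `back!`/`update!` methods:
#
#     m  = Affine(randn(10, 3), zeros(3), zeros(10, 3), zeros(3))
#     X  = randn(5, 10)        # a batch of 5 inputs
#     ΔY = randn(5, 3)         # gradient arriving from the loss
#     ΔX = back!(m, ΔY, X)     # accumulates m.ΔW / m.Δb, returns ΔX
#     update!(m, 0.1)          # gradient-descent step with η = 0.1
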
end