diff --git a/src/Flux.jl b/src/Flux.jl
index 7671ddd2..f33351b7 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -8,7 +8,7 @@ using Juno, Requires
 using Lazy: @forward
 
 export Chain, Dense, RNN, LSTM, Dropout, LayerNorm,
-  SGD, ADAM, Momentum, Nesterov,
+  SGD, ADAM, Momentum, Nesterov, AMSGrad,
   param, params, mapleaves
 
 using NNlib
diff --git a/src/optimise/Optimise.jl b/src/optimise/Optimise.jl
index 5f144b65..acec542e 100644
--- a/src/optimise/Optimise.jl
+++ b/src/optimise/Optimise.jl
@@ -1,7 +1,7 @@
 module Optimise
 
 export update!, params, train!,
-  SGD, ADAM, Momentum, Nesterov, RMSProp, ADAGrad, ADADelta
+  SGD, ADAM, Momentum, Nesterov, RMSProp, ADAGrad, ADADelta, AMSGrad
 
 struct Param{T}
   x::T
diff --git a/src/optimise/interface.jl b/src/optimise/interface.jl
index 47b0f62c..42b05dc8 100644
--- a/src/optimise/interface.jl
+++ b/src/optimise/interface.jl
@@ -73,3 +73,12 @@ tuning.
 """
 ADADelta(ps; ρ = 0.9, ϵ = 1e-8, decay = 0) =
   optimiser(ps, p->adadelta(p; ρ=ρ, ϵ=ϵ), p->descent(p,1))
+
+"""
+    AMSGrad(params, η = 0.001; β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)
+
+[AMSGrad](https://openreview.net/forum?id=ryQu7f-RZ) optimiser. Parameters don't need
+tuning.
+"""
+AMSGrad(ps, η = 0.001; β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0) =
+  optimiser(ps, p -> amsgrad(p; η = η, β1 = β1, β2 = β2, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))
diff --git a/src/optimise/optimisers.jl b/src/optimise/optimisers.jl
index 7cf271b6..c09e6131 100644
--- a/src/optimise/optimisers.jl
+++ b/src/optimise/optimisers.jl
@@ -56,12 +56,24 @@ function adam(p::Param; η::Real = 0.001, β1::Real = 0.9, β2::Real = 0.999, ϵ
   function ()
     @. mt = β1 * mt + (1 - β1) * p.Δ
     @. vt = β2 * vt + (1 - β2) * p.Δ^2
-    @. p.Δ = mt / (1 - β1p) / (sqrt(vt / (1 - β2p)) + ϵ) * η
+    @. p.Δ = mt / (1 - β1p) / (√(vt / (1 - β2p)) + ϵ) * η
     β1p *= β1
     β2p *= β2
   end
 end
 
+function amsgrad(p::Param; η::Real = 0.001, β1::Real = 0.9, β2::Real = 0.999, ϵ::Real = 1e-8)
+  mt = zeros(p.x)
+  vt = zeros(p.x) .+ ϵ
+  v̂t = zeros(p.x) .+ ϵ
+  function ()
+    @. mt = β1 * mt + (1 - β1) * p.Δ
+    @. vt = β2 * vt + (1 - β2) * p.Δ^2
+    @. v̂t = max(v̂t, vt)
+    @. p.Δ = η * mt / √v̂t
+  end
+end
+
 clip(p::Param, thresh::Real) = () -> clamp!(p.Δ, -thresh, thresh)
 
 function expdecay(p::Param, γ::Real)
@@ -82,4 +94,4 @@ function invdecay(p::Param, γ::Real)
   else
     return () -> nothing
   end
-end
\ No newline at end of file
+end
diff --git a/test/optimise.jl b/test/optimise.jl
index 65bb65be..526f0534 100644
--- a/test/optimise.jl
+++ b/test/optimise.jl
@@ -3,7 +3,7 @@ using Flux.Tracker
 
 @testset "Optimise" begin
   w = randn(10, 10)
-  for Opt in [SGD, Nesterov, Momentum, ADAM, RMSProp, ps -> ADAGrad(ps, 0.1), ADADelta]
+  for Opt in [SGD, Nesterov, Momentum, ADAM, RMSProp, ps -> ADAGrad(ps, 0.1), ADADelta, AMSGrad]
     w′ = param(randn(10, 10))
     loss(x) = Flux.mse(w*x, w′*x)
     opt = Opt([w′])
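
Below is a minimal usage sketch for the new optimiser, in the spirit of the loop in test/optimise.jl. The layer, loss, and data are made up for illustration, and the sketch assumes the existing Flux params and Flux.train! API, which this patch does not touch:

    using Flux

    m = Dense(10, 5)                  # toy layer whose parameters we want to fit
    loss(x, y) = Flux.mse(m(x), y)    # mean-squared-error objective
    opt = AMSGrad(params(m))          # defaults: η = 0.001, β1 = 0.9, β2 = 0.999

    data = [(rand(10), rand(5))]      # a single dummy (input, target) pair
    Flux.train!(loss, data, opt)      # one pass over the data, stepping with AMSGrad

As in adam, the closure returned by amsgrad rescales p.Δ in place on each call; the difference is that the step is divided by the running maximum v̂t of the second-moment estimate rather than by the current estimate, so the effective learning rate can only shrink over time.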