From 36001d085a3f9175eaee572e8b8532410a8ebf50 Mon Sep 17 00:00:00 2001
From: baggepinnen
Date: Mon, 4 Dec 2017 09:17:05 +0100
Subject: [PATCH 1/3] Implement AMSGrad optimiser

---
 src/optimise/Optimise.jl   |  2 +-
 src/optimise/interface.jl  |  9 +++++++++
 src/optimise/optimisers.jl | 14 +++++++++++++-
 3 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/src/optimise/Optimise.jl b/src/optimise/Optimise.jl
index 5f144b65..acec542e 100644
--- a/src/optimise/Optimise.jl
+++ b/src/optimise/Optimise.jl
@@ -1,7 +1,7 @@
 module Optimise

 export update!, params, train!,
-  SGD, ADAM, Momentum, Nesterov, RMSProp, ADAGrad, ADADelta
+  SGD, ADAM, Momentum, Nesterov, RMSProp, ADAGrad, ADADelta, AMSGrad

 struct Param{T}
   x::T
diff --git a/src/optimise/interface.jl b/src/optimise/interface.jl
index 0b2a25ae..c6f98553 100644
--- a/src/optimise/interface.jl
+++ b/src/optimise/interface.jl
@@ -71,3 +71,12 @@ tuning.
 """
 ADADelta(ps; η = 0.01, ρ = 0.95, ϵ = 1e-8, decay = 0) =
   optimiser(ps, p -> adadelta(p; ρ = ρ, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))
+
+  """
+    AMSGrad(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)
+
+  [AMSGrad](https://openreview.net/forum?id=ryQu7f-RZ) optimiser. Parameters don't need
+  tuning.
+  """
+  AMSGrad(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0) =
+    optimiser(ps, p -> amsgrad(p; η = η, β1 = β1, β2 = β2, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))
diff --git a/src/optimise/optimisers.jl b/src/optimise/optimisers.jl
index abc54090..12a14df4 100644
--- a/src/optimise/optimisers.jl
+++ b/src/optimise/optimisers.jl
@@ -67,8 +67,20 @@ function adam(p::Param; η::Real = 0.001, β1::Real = 0.9, β2::Real = 0.999, ϵ
   function ()
     @. mt = β1 * mt + (1 - β1) * p.Δ
     @. vt = β2 * vt + (1 - β2) * p.Δ ^ 2
-    @. p.Δ = √(1 - β2p) / √(1 - β1p) * mt / √vt * η
+    @. p.Δ = √(1 - β2p) / (1 - β1p) * mt / √vt * η
     β1p *= β1
     β2p *= β2
   end
 end
+
+function amsgrad(p::Param; η::Real = 0.001, β1::Real = 0.9, β2::Real = 0.999, ϵ::Real = 1e-8)
+  mt = zeros(p.x)
+  vt = zeros(p.x) .+ ϵ
+  v̂t = zeros(p.x) .+ ϵ
+  function ()
+    @. mt = β1 * mt + (1 - β1) * p.Δ
+    @. vt = β2 * vt + (1 - β2) * p.Δ ^ 2
+    @. v̂t = max.(v̂t, vt)
+    @. p.Δ = η * mt / √v̂t
+  end
+end

From 41febee9c171e610336afd79e1d1480100f29a53 Mon Sep 17 00:00:00 2001
From: baggepinnen
Date: Mon, 4 Dec 2017 09:34:27 +0100
Subject: [PATCH 2/3] Export and indent

---
 src/Flux.jl               |  2 +-
 src/optimise/interface.jl | 16 ++++++++--------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/Flux.jl b/src/Flux.jl
index df4b1636..2ae8879f 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -8,7 +8,7 @@ using Juno, Requires
 using Lazy: @forward

 export Chain, Dense, RNN, LSTM, Dropout, LayerNorm,
-  SGD, ADAM, Momentum, Nesterov,
+  SGD, ADAM, Momentum, Nesterov, AMSGrad,
   param, params, mapleaves

 using NNlib
diff --git a/src/optimise/interface.jl b/src/optimise/interface.jl
index c6f98553..679134fe 100644
--- a/src/optimise/interface.jl
+++ b/src/optimise/interface.jl
@@ -47,7 +47,7 @@ RMSProp(ps, η = 0.001; ρ = 0.9, ϵ = 1e-8, decay = 0) =
   optimiser(ps, p -> rmsprop(p; η = η, ρ = ρ, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))

 """
-    ADAM(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)
+    ADAM(params, η = 0.001; β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)

 [ADAM](https://arxiv.org/abs/1412.6980v8) optimiser.
 """
@@ -72,11 +72,11 @@ tuning.
 ADADelta(ps; η = 0.01, ρ = 0.95, ϵ = 1e-8, decay = 0) =
   optimiser(ps, p -> adadelta(p; ρ = ρ, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))

-  """
-    AMSGrad(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)
+"""
+    AMSGrad(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0)

-  [AMSGrad](https://openreview.net/forum?id=ryQu7f-RZ) optimiser. Parameters don't need
-  tuning.
-  """
-  AMSGrad(params; η = 0.001, β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0) =
-    optimiser(ps, p -> amsgrad(p; η = η, β1 = β1, β2 = β2, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))
+[AMSGrad](https://openreview.net/forum?id=ryQu7f-RZ) optimiser. Parameters don't need
+tuning.
+"""
+AMSGrad(ps, η = 0.001; β1 = 0.9, β2 = 0.999, ϵ = 1e-08, decay = 0) =
+  optimiser(ps, p -> amsgrad(p; η = η, β1 = β1, β2 = β2, ϵ = ϵ), p -> invdecay(p, decay), p -> descent(p, 1))

From 55bbe50f32d7dfe58360da9da3832add38a8cc38 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Fri, 8 Dec 2017 18:24:07 +0000
Subject: [PATCH 3/3] regression test

---
 test/optimise.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/optimise.jl b/test/optimise.jl
index 65bb65be..526f0534 100644
--- a/test/optimise.jl
+++ b/test/optimise.jl
@@ -3,7 +3,7 @@ using Flux.Tracker

 @testset "Optimise" begin
   w = randn(10, 10)
-  for Opt in [SGD, Nesterov, Momentum, ADAM, RMSProp, ps -> ADAGrad(ps, 0.1), ADADelta]
+  for Opt in [SGD, Nesterov, Momentum, ADAM, RMSProp, ps -> ADAGrad(ps, 0.1), ADADelta, AMSGrad]
     w′ = param(randn(10, 10))
     loss(x) = Flux.mse(w*x, w′*x)
     opt = Opt([w′])
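
The key change in the `amsgrad` closure added in PATCH 1/3, relative to `adam`, is that the denominator uses a running elementwise maximum `v̂t` of the second-moment estimate (this version also drops ADAM's bias-correction factors), so the effective per-parameter step size can never grow back once it has shrunk. A minimal standalone sketch of that rule on plain arrays, outside Flux's `Param`/`optimiser` plumbing; the function name `amsgrad_step!` and the toy quadratic are illustrative only, not part of the patch:

    # Sketch of the AMSGrad step from the patch, written against plain arrays.
    # State: first moment mt, second moment vt, and the running maximum v̂t.
    function amsgrad_step!(x, grad, mt, vt, v̂t; η = 0.001, β1 = 0.9, β2 = 0.999)
      @. mt = β1 * mt + (1 - β1) * grad       # moving average of gradients
      @. vt = β2 * vt + (1 - β2) * grad ^ 2   # moving average of squared gradients
      @. v̂t = max(v̂t, vt)                     # keep the largest vt seen so far
      @. x = x - η * mt / √v̂t                 # divide by the max, not the current vt
      return x
    end

    # Toy usage: minimise sum(abs2, x) by following its gradient 2x.
    x = randn(10)
    mt, vt, v̂t = zeros(10), fill(1e-8, 10), fill(1e-8, 10)  # ϵ folded into the initial state, as in the patch
    for _ in 1:1_000
      amsgrad_step!(x, 2 .* x, mt, vt, v̂t)
    end

In the patch itself the closure only writes this step into `p.Δ`, with the learning rate already folded in; applying it to `p.x` is left to the chained `descent(p, 1)` stage, which is why that stage is constructed with a rate of 1.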
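
The regression test in PATCH 3/3 is shown only up to the construction of the optimiser, so here is a hypothetical end-to-end loop in the same style, assuming the Tracker-era API the test file imports (`param` exported from Flux, `back!` from Flux.Tracker); the loop body and iteration count are illustrative assumptions, not the actual test code:

    # Hypothetical usage sketch: fit w′ so that w′*x reproduces w*x under the
    # mean-squared error, using the constructor introduced in this patch series.
    using Flux, Flux.Tracker

    w  = randn(10, 10)                # fixed target weights
    w′ = param(randn(10, 10))         # tracked, trainable weights, as in the test
    loss(x) = Flux.mse(w*x, w′*x)

    opt = AMSGrad([w′])               # AMSGrad(ps, η = 0.001; β1, β2, ϵ, decay) after PATCH 2/3
    for _ in 1:10_000
      back!(loss(rand(10)))           # reverse pass through the tracked graph (assumed Tracker API)
      opt()                           # run the composed amsgrad → invdecay → descent update
    end

The same loop shape is what the test exercises for the other optimisers, which is why the patch only needs to append AMSGrad to the `Opt` list.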