From af30ee78d2b31bd9931203718b244fba846abb2b Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Mon, 1 May 2017 19:44:26 +0100
Subject: [PATCH] remove old code

---
 src/model.jl | 18 +++---------------
 1 file changed, 3 insertions(+), 15 deletions(-)

diff --git a/src/model.jl b/src/model.jl
index bfce352f..279b0ed6 100644
--- a/src/model.jl
+++ b/src/model.jl
@@ -60,9 +60,9 @@ function runmodel end
 
 # TODO: should be AbstractArray?
 """
-A `Param` object stores a parameter array along with an accumulated delta to
-that array. When converting to backends like TensorFlow, identical `Param`s will
-result in identical variable objects, making model reuse trivial.
+A `Param` object stores a parameter array along with its gradient.
+When converting to backends like TensorFlow, identical `Param`s will
+result in identical variable objects.
 """
 struct Param{T}
   x::T
@@ -78,17 +78,6 @@ param(x) = Param(x, zero(x))
 
 state(p::Param) = p.x
 
-"""
-    accumulate!(p::Param, Δ) => p
-
-Accumulates the update `Δ` on `p`. The value of `p` won't change until
-`update!`.
-"""
-function accumulate!(p::Param, Δ)
-  p.Δx += Δ
-  return p
-end
-
 """
     update!(p::Param)
 
@@ -101,7 +90,6 @@ function update!(p::Param, η)
 end
 
 state(x) = x
-accumulate!(x, Δ) = x
 
 Base.size(p::Param) = size(p.x)
 Base.size(p::Param, n) = size(p.x, n)
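
For context, a minimal sketch of how the remaining `Param` API fits together after this patch. The `Δx` field and the body of `update!` are assumptions inferred from the removed `accumulate!` code and the `update!(p::Param, η)` signature; the diff does not show them in full.

    # Sketch only: the Δx field and the update! body are assumed, not taken from the diff.
    struct Param{T}
      x::T    # parameter values
      Δx::T   # gradient written by the backend (previously via accumulate!)
    end

    param(x) = Param(x, zero(x))   # start with a zero gradient
    state(p::Param) = p.x
    state(x) = x                   # non-parameters pass through unchanged

    # Assumed body: take a gradient step scaled by the learning rate η, then reset.
    function update!(p::Param, η)
      p.x .-= η .* p.Δx
      p.Δx .= 0
      return p
    end

    # Usage: wrap an array, let the backend accumulate a gradient, then step.
    W = param(randn(3, 3))
    W.Δx .+= ones(3, 3)
    update!(W, 0.1)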