From 86097e76fdaa149b2caf815d5404c77d16c4f754 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Fri, 8 Dec 2017 19:34:34 +0000
Subject: [PATCH] tweak batchnorm example

---
 src/layers/normalisation.jl | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/layers/normalisation.jl b/src/layers/normalisation.jl
index 4eaa6d5b..a018a073 100644
--- a/src/layers/normalisation.jl
+++ b/src/layers/normalisation.jl
@@ -81,13 +81,12 @@ in order to normalize the input of other layer, put the `BatchNorm` layer
 before activation function.
 
 ```julia
-julia> m = Chain(
+m = Chain(
   Dense(28^2, 64),
   BatchNorm(64, λ = relu),
   Dense(64, 10),
   BatchNorm(10),
   softmax)
-Chain(Dense(784, 64), BatchNorm(64, λ = NNlib.relu), Dense(64, 10), BatchNorm(10), NNlib.softmax)
 ```
 """
 mutable struct BatchNorm{F,V,N}
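
For context, a minimal sketch of how the revised docstring example might be exercised, assuming the Flux API of this era (the `BatchNorm(64, λ = relu)` keyword form from the docstring) and an assumed dummy batch of 10 samples; this is illustration only, not part of the patch:

```julia
using Flux

# The chain from the updated docstring: BatchNorm placed after each Dense
# layer and before the activation (λ = relu).
m = Chain(
  Dense(28^2, 64),
  BatchNorm(64, λ = relu),
  Dense(64, 10),
  BatchNorm(10),
  softmax)

# Apply to a dummy batch: 28^2 features per sample, 10 samples as columns.
x = rand(Float32, 28^2, 10)
y = m(x)   # each column sums to ≈ 1 because of the final softmax
```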