Merge pull request #207 from safnuk/pull-request/07b0f95d
BatchNorm for convolutions
This commit is contained in:
commit
8f29968c32
@ -68,70 +68,84 @@ function Base.show(io::IO, l::LayerNorm)
|
|||||||
end
|
end
|
||||||
|
|
||||||
"""
|
"""
|
||||||
BatchNorm(dims...; λ = identity,
|
BatchNorm(channels::Integer, σ = identity;
|
||||||
initβ = zeros, initγ = ones, ϵ = 1e-8, momentum = .1)
|
initβ = zeros, initγ = ones,
|
||||||
|
ϵ = 1e-8, momentum = .1)
|
||||||
|
|
||||||
Batch Normalization Layer for [`Dense`](@ref) layer.
|
Batch Normalization layer. The `channels` input should be the size of the
|
||||||
|
channel dimension in your data (see below).
|
||||||
|
|
||||||
|
Given an array with `N` dimensions, call the `N-1`th the channel dimension. (For
|
||||||
|
a batch of feature vectors this is just the data dimension, for `WHCN` images
|
||||||
|
it's the usual channel dimension.)
|
||||||
|
|
||||||
|
`BatchNorm` computes the mean and variance for each `W×H×1×N` slice and
|
||||||
|
shifts them to have a new mean and variance (corresponding to the learnable,
|
||||||
|
per-channel `bias` and `scale` parameters).
|
||||||
|
|
||||||
See [Batch Normalization: Accelerating Deep Network Training by Reducing
|
See [Batch Normalization: Accelerating Deep Network Training by Reducing
|
||||||
Internal Covariate Shift](https://arxiv.org/pdf/1502.03167.pdf)
|
Internal Covariate Shift](https://arxiv.org/pdf/1502.03167.pdf).
|
||||||
|
|
||||||
In the example of MNIST,
|
Example:
|
||||||
in order to normalize the input of other layer,
|
|
||||||
put the `BatchNorm` layer before activation function.
|
|
||||||
|
|
||||||
```julia
|
```julia
|
||||||
m = Chain(
|
m = Chain(
|
||||||
Dense(28^2, 64),
|
Dense(28^2, 64),
|
||||||
BatchNorm(64, λ = relu),
|
BatchNorm(64, relu),
|
||||||
Dense(64, 10),
|
Dense(64, 10),
|
||||||
BatchNorm(10),
|
BatchNorm(10),
|
||||||
softmax)
|
softmax)
|
||||||
```
|
```
|
||||||
"""
|
"""
|
||||||
mutable struct BatchNorm{F,V,N}
|
mutable struct BatchNorm{F,V,W,N}
|
||||||
λ::F # activation function
|
λ::F # activation function
|
||||||
β::V # bias
|
β::V # bias
|
||||||
γ::V # scale
|
γ::V # scale
|
||||||
μ # moving mean
|
μ::W # moving mean
|
||||||
σ # moving std
|
σ::W # moving std
|
||||||
ϵ::N
|
ϵ::N
|
||||||
momentum::N
|
momentum::N
|
||||||
active::Bool
|
active::Bool
|
||||||
end
|
end
|
||||||
|
|
||||||
BatchNorm(dims::Integer...; λ = identity,
|
BatchNorm(chs::Integer, λ = identity;
|
||||||
initβ = zeros, initγ = ones, ϵ = 1e-8, momentum = .1) =
|
initβ = zeros, initγ = ones, ϵ = 1e-8, momentum = .1) =
|
||||||
BatchNorm(λ, param(initβ(dims)), param(initγ(dims)), 0., 1., ϵ, momentum, true)
|
BatchNorm(λ, param(initβ(chs)), param(initγ(chs)),
|
||||||
|
zeros(chs), ones(chs), ϵ, momentum, true)
|
||||||
|
|
||||||
function (BN::BatchNorm)(x)
|
function (BN::BatchNorm)(x)
|
||||||
λ, γ, β = BN.λ, BN.γ, BN.β
|
λ, γ, β = BN.λ, BN.γ, BN.β
|
||||||
|
dims = length(size(x))
|
||||||
|
channels = size(x, dims-1)
|
||||||
|
affine_shape = ones(Int, dims)
|
||||||
|
affine_shape[end-1] = channels
|
||||||
|
m = prod(size(x)[1:end-2]) * size(x)[end]
|
||||||
|
|
||||||
if !BN.active
|
if !BN.active
|
||||||
μ = BN.μ
|
μ = reshape(BN.μ, affine_shape...)
|
||||||
σ = BN.σ
|
σ = reshape(BN.σ, affine_shape...)
|
||||||
else
|
else
|
||||||
T = eltype(x)
|
T = eltype(x)
|
||||||
|
|
||||||
ϵ = data(convert(T, BN.ϵ))
|
ϵ = data(convert(T, BN.ϵ))
|
||||||
m = size(x, 2) # batch size
|
axes = [1:dims-2; dims] # axes to reduce along (all but channels axis)
|
||||||
μ = mean(x, 2)
|
μ = mean(x, axes)
|
||||||
σ = sqrt.(sum((x .- μ).^2, 2) ./ m .+ ϵ)
|
σ = sqrt.(mean((x .- μ).^2, axes) .+ ϵ)
|
||||||
|
|
||||||
# update moving mean/std
|
# update moving mean/std
|
||||||
mtm = data(convert(T, BN.momentum))
|
mtm = data(convert(T, BN.momentum))
|
||||||
BN.μ = (1 - mtm) .* BN.μ .+ mtm .* data(μ)
|
BN.μ = (1 - mtm) .* BN.μ .+ mtm .* squeeze(data(μ), (axes...))
|
||||||
BN.σ = (1 - mtm) .* BN.σ .+ mtm .* data(σ) .* m ./ (m - 1)
|
BN.σ = (1 - mtm) .* BN.σ .+ mtm .* squeeze(data(σ), (axes...)) .* m ./ (m - 1)
|
||||||
end
|
end
|
||||||
|
|
||||||
λ.(γ .* ((x .- μ) ./ σ) .+ β)
|
λ.(reshape(γ, affine_shape...) .* ((x .- μ) ./ σ) .+ reshape(β, affine_shape...))
|
||||||
end
|
end
|
||||||
|
|
||||||
children(BN::BatchNorm) =
|
children(BN::BatchNorm) =
|
||||||
(BN.λ, BN.β, BN.γ, BN.μ, BN.σ, BN.momentum, BN.ϵ, BN.active)
|
(BN.λ, BN.β, BN.γ, BN.μ, BN.σ, BN.ϵ, BN.momentum, BN.active)
|
||||||
|
|
||||||
mapchildren(f, BN::BatchNorm) = # e.g. mapchildren(cu, BN)
|
mapchildren(f, BN::BatchNorm) = # e.g. mapchildren(cu, BN)
|
||||||
BatchNorm(BN.λ, f(BN.β), f(BN.γ), BN.μ, BN.σ, BN.momentum, BN.ϵ, BN.active)
|
BatchNorm(BN.λ, f(BN.β), f(BN.γ), f(BN.μ), f(BN.σ), BN.ϵ, BN.momentum, BN.active)
|
||||||
|
|
||||||
_testmode!(BN::BatchNorm, test) = (BN.active = !test)
|
_testmode!(BN::BatchNorm, test) = (BN.active = !test)
|
||||||
|
|
||||||
|
@ -67,7 +67,7 @@ end
|
|||||||
end
|
end
|
||||||
|
|
||||||
# with activation function
|
# with activation function
|
||||||
let m = BatchNorm(2, λ = σ), x = param([1 2; 3 4; 5 6]')
|
let m = BatchNorm(2, σ), x = param([1 2; 3 4; 5 6]')
|
||||||
@test m.active
|
@test m.active
|
||||||
m(x)
|
m(x)
|
||||||
|
|
||||||
@ -77,4 +77,22 @@ end
|
|||||||
x′ = m(x).data
|
x′ = m(x).data
|
||||||
@test x′[1] ≈ σ((1 - 0.3) / 1.1449489742783179)
|
@test x′[1] ≈ σ((1 - 0.3) / 1.1449489742783179)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
let m = BatchNorm(2), x = param(reshape(1:6, 3, 2, 1))
|
||||||
|
y = reshape(permutedims(x, [2, 1, 3]), 2, :)
|
||||||
|
y = permutedims(reshape(m(y), 2, 3, 1), [2, 1, 3])
|
||||||
|
@test m(x) == y
|
||||||
|
end
|
||||||
|
|
||||||
|
let m = BatchNorm(2), x = param(reshape(1:12, 2, 3, 2, 1))
|
||||||
|
y = reshape(permutedims(x, [3, 1, 2, 4]), 2, :)
|
||||||
|
y = permutedims(reshape(m(y), 2, 2, 3, 1), [2, 3, 1, 4])
|
||||||
|
@test m(x) == y
|
||||||
|
end
|
||||||
|
|
||||||
|
let m = BatchNorm(2), x = param(reshape(1:24, 2, 2, 3, 2, 1))
|
||||||
|
y = reshape(permutedims(x, [4, 1, 2, 3, 5]), 2, :)
|
||||||
|
y = permutedims(reshape(m(y), 2, 2, 2, 3, 1), [2, 3, 4, 1, 5])
|
||||||
|
@test m(x) == y
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
Loading…
Reference in New Issue
Block a user