fix cpu batchnorm

CarloLucibello 2018-07-15 17:49:41 +02:00
parent a0fd91b866
commit 185e9148b6
2 changed files with 39 additions and 38 deletions


@@ -96,56 +96,58 @@ m = Chain(
  softmax)
```
"""
-mutable struct BatchNorm{F,V,W,N}
-  λ::F # activation function
-  β::V # bias
-  γ::V # scale
-  μ::W # moving mean
-  σ::W # moving std
-  ϵ::N
-  momentum::N
+mutable struct BatchNorm
+  λ # activation function
+  β # bias
+  γ # scale
+  μ # moving mean
+  σ² # moving var
+  ϵ
+  momentum
  active::Bool
end

-BatchNorm(chs::Integer, λ = identity;
-          initβ = zeros, initγ = ones, ϵ = 1e-8, momentum = .1) =
+function BatchNorm(chs::Integer, λ = identity;
+                   initβ = x->zeros(Float32,x),
+                   initγ = x->ones(Float32,x),
+                   ϵ = 1f-8,
+                   momentum = 0.1f0)
  BatchNorm(λ, param(initβ(chs)), param(initγ(chs)),
-            zeros(chs), ones(chs), ϵ, momentum, true)
+            zeros(Float32, chs), ones(Float32, chs), ϵ, momentum, true)
+end

function (BN::BatchNorm)(x)
  size(x, ndims(x)-1) == length(BN.β) ||
    error("BatchNorm expected $(length(BN.β)) channels, got $(size(x, ndims(x)-1))")
  γ, β = BN.γ, BN.β
-  dims = length(size(x))
-  channels = size(x, dims-1)
+  dims = ndims(x)
  affine_shape = ones(Int, dims)
-  affine_shape[end-1] = channels
+  m = prod(size(x)[1:end-2]) * size(x)[end]
+  affine_shape[end-1] = size(x, dims-1)
+  T = eltype(x)

  if !BN.active
    μ = reshape(BN.μ, affine_shape...)
-    σ = reshape(BN.σ, affine_shape...)
+    σ² = reshape(BN.σ², affine_shape...)
  else
    T = eltype(x)
-    ϵ = data(convert(T, BN.ϵ))
    axes = [1:dims-2; dims] # axes to reduce along (all but channels axis)
-    m = prod(size(x, axes...))
    μ = mean(x, axes)
-    σ = sqrt.(mean((x .- μ).^2, axes) .+ ϵ)
+    σ² = sum((x.-μ).^2, axes) ./ m

    # update moving mean/std
-    mtm = data(convert(T, BN.momentum))
-    BN.μ = (1 - mtm) .* BN.μ .+ mtm .* squeeze(data(μ), (axes...))
-    BN.σ = (1 - mtm) .* BN.σ .+ mtm .* squeeze(data(σ), (axes...)) .* m ./ (m - 1)
+    mtm = convert(T, BN.momentum)
+    BN.μ = ((1 - mtm) .* BN.μ .+ mtm .* squeeze(data(μ), (axes...))) |> data
+    BN.σ² = ((1 - mtm) .* BN.σ² .+ mtm .* squeeze(data(σ²), (axes...))*m/(m-1)) |> data
  end

-  let λ = BN.λ
-    λ.(reshape(γ, affine_shape...) .* ((x .- μ) ./ σ) .+ reshape(β, affine_shape...))
-  end
+  ϵ = convert(T, BN.ϵ)
+  BN.λ.(reshape(γ, affine_shape...) .* ((x .- μ) ./ sqrt.(σ² .+ ϵ)) .+ reshape(β, affine_shape...))
end

children(BN::BatchNorm) =
-  (BN.λ, BN.β, BN.γ, BN.μ, BN.σ, BN.ϵ, BN.momentum, BN.active)
+  (BN.λ, BN.β, BN.γ, BN.μ, BN.σ², BN.ϵ, BN.momentum, BN.active)

mapchildren(f, BN::BatchNorm) = # e.g. mapchildren(cu, BN)
  BatchNorm(BN.λ, f(BN.β), f(BN.γ), f(BN.μ), f(BN.σ), BN.ϵ, BN.momentum, BN.active)
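
For reference, the arithmetic the new forward pass implements can be sketched in plain Julia (current syntax, no Flux/Tracker; the helper name `simple_batchnorm!` and the channels × batch layout are illustrative, not part of this commit): per-channel batch mean and uncorrected variance, normalisation as γ .* (x .- μ) ./ sqrt.(σ² .+ ϵ) .+ β, and running statistics blended with the momentum, the running variance carrying the same m/(m-1) correction as the added lines above.

```julia
# Minimal sketch of the arithmetic in the updated forward pass.
# Plain Julia (current syntax), no Flux/Tracker; `simple_batchnorm!` is a
# hypothetical helper and x is assumed to be laid out as channels × batch.
using Statistics

function simple_batchnorm!(x, γ, β, running_mean, running_var; ϵ = 1f-8, mtm = 0.1f0)
    m  = size(x, 2)                          # samples per channel (the `m` above)
    μ  = mean(x, dims = 2)                   # per-channel batch mean
    σ² = sum((x .- μ) .^ 2, dims = 2) ./ m   # uncorrected batch variance

    # momentum update of the stored statistics; the running variance gets the
    # m/(m-1) correction, as in the diff
    running_mean .= (1 - mtm) .* running_mean .+ mtm .* vec(μ)
    running_var  .= (1 - mtm) .* running_var  .+ mtm .* vec(σ²) .* m ./ (m - 1)

    # normalise with the batch statistics; ϵ sits inside the sqrt, as in the new code
    γ .* (x .- μ) ./ sqrt.(σ² .+ ϵ) .+ β
end

x = Float32[1 3 5; 2 4 6]                    # same 2×3 input as the updated tests
γ, β = ones(Float32, 2), zeros(Float32, 2)
running_mean, running_var = zeros(Float32, 2), ones(Float32, 2)

simple_batchnorm!(x, γ, β, running_mean, running_var)
# running_mean ≈ [0.3, 0.4], running_var ≈ [1.3, 1.3] — the values the tests expect
```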


@@ -1,4 +1,5 @@
using Flux: testmode!
+using Flux.Tracker: data

@testset "Dropout" begin
  x = [1.,2.,3.]
@@ -28,7 +29,8 @@ using Flux: testmode!
end

@testset "BatchNorm" begin
-  let m = BatchNorm(2), x = param([1 2; 3 4; 5 6]')
+  let m = BatchNorm(2), x = param([1 3 5;
+                                   2 4 6])
    @test m.β.data == [0, 0] # initβ(2)
    @test m.γ.data == [1, 1] # initγ(2)
@@ -53,29 +55,26 @@ end
    # .1 * 4 + 0 = .4
    @test m.μ ≈ reshape([0.3, 0.4], 2, 1)
-    # julia> .1 .* std(x, 2, corrected=false) .* (3 / 2).+ .9 .* [1., 1.]
-    # 2×1 Array{Float64,2}:
-    #  1.14495
-    #  1.14495
-    @test m.σ ≈ .1 .* std(x.data, 2, corrected=false) .* (3 / 2).+ .9 .* [1., 1.]
+    @test m.σ² ≈ 0.1 .* var(x.data, 2, corrected=false)*3/2 + 0.9 .* [1., 1.]

    testmode!(m)
    @test !m.active

-    x′ = m(x).data
-    @test x′[1] ≈ (1 - 0.3) / 1.1449489742783179
+    y = m(x).data
+    @test y ≈ data((x .- m.μ) ./ sqrt.(m.σ² .+ m.ϵ))
  end

  # with activation function
-  let m = BatchNorm(2, σ), x = param([1 2; 3 4; 5 6]')
+  let m = BatchNorm(2, sigmoid), x = param([1 3 5;
+                                            2 4 6])
    @test m.active
    m(x)

    testmode!(m)
    @test !m.active

-    x′ = m(x).data
-    @test x′[1] ≈ σ((1 - 0.3) / 1.1449489742783179)
+    y = m(x).data
+    @test y ≈ data(sigmoid.((x .- m.μ) ./ sqrt.(m.σ² .+ m.ϵ)))
  end

  let m = BatchNorm(2), x = param(reshape(1:6, 3, 2, 1))
@@ -85,7 +84,7 @@ end
  end

  let m = BatchNorm(2), x = param(reshape(1:12, 2, 3, 2, 1))
    y = reshape(permutedims(x, [3, 1, 2, 4]), 2, :)
    y = permutedims(reshape(m(y), 2, 2, 3, 1), [2, 3, 1, 4])
    @test m(x) == y
  end
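
The numbers asserted in the rewritten BatchNorm tests can be reproduced by hand; below is a plain-Julia sketch (current `dims =` keyword syntax rather than the positional form used in the test file, with Tracker's param/data left out) of the running mean, the running variance, and the test-mode output the `@test y ≈ ...` lines compare against.

```julia
# Worked check of the statistics the rewritten BatchNorm tests assert.
# Plain Julia with Statistics; Tracker-specific pieces (param/data) are omitted.
using Statistics

x   = [1 3 5; 2 4 6]     # the 2×3 test input
mtm = 0.1                # BatchNorm momentum used in the tests
m   = size(x, 2)         # 3 samples per channel

μ  = vec(mean(x, dims = 2))                      # [3.0, 4.0]
σ² = vec(var(x, dims = 2, corrected = false))    # [8/3, 8/3]

# running mean after one call: .1 * 3 + 0 = .3 and .1 * 4 + 0 = .4
running_mean = (1 - mtm) .* [0.0, 0.0] .+ mtm .* μ

# running variance: 0.1 * var * 3/2 + 0.9 * 1 = 1.3, the new m.σ² expectation
running_var = (1 - mtm) .* [1.0, 1.0] .+ mtm .* σ² .* m ./ (m - 1)

# in test mode the layer output should match (x .- μ) ./ sqrt.(σ² .+ ϵ) built from
# the stored statistics, which is what the `@test y ≈ ...` lines check
ϵ = 1e-8
y = (x .- running_mean) ./ sqrt.(running_var .+ ϵ)
```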