Add new constructors and test

Yueh-Hua Tu 2018-08-24 10:31:13 +08:00
parent 5b37319289
commit 634d34686e
3 changed files with 59 additions and 15 deletions


@@ -6,8 +6,8 @@ These core layers form the foundation of almost all neural networks.
 Chain
 Dense
 Conv
-Maxpool
-Meanpool
+MaxPool
+MeanPool
 ```
 
 ## Recurrent Layers


@@ -53,42 +53,52 @@ end
 """
-    Maxpool(k)
+    MaxPool(k)
 
 Maxpooling layer. `k` stands for the size of the window for each dimension of the input.
 
 Takes the keyword arguments `pad` and `stride`.
 """
-struct Maxpool{N}
+struct MaxPool{N}
   k::NTuple{N,Int}
   pad::NTuple{N,Int}
   stride::NTuple{N,Int}
-  Maxpool(k::NTuple{N,Int}; pad = map(_->0,k), stride = k) where N = new{N}(k, pad, stride)
+  MaxPool(k::NTuple{N,Int}; pad = map(_->0,k), stride = k) where N = new{N}(k, pad, stride)
 end
 
-(m::Maxpool)(x) = maxpool(x, m.k; pad = m.pad, stride = m.stride)
+function MaxPool{N}(k::Int; pad = 0, stride = k) where N
+  k_ = Tuple(repeat([k, ], N))
+  MaxPool(k_; pad = map(_->pad,k_), stride=map(_->stride,k_))
+end
+
+(m::MaxPool)(x) = maxpool(x, m.k; pad = m.pad, stride = m.stride)
 
-function Base.show(io::IO, m::Maxpool)
-  print(io, "Maxpool(", m.k, ", ", m.pad, ", ", m.stride, ")")
+function Base.show(io::IO, m::MaxPool)
+  print(io, "MaxPool(", m.k, ", ", m.pad, ", ", m.stride, ")")
 end
 
 """
-    Meanpool(k)
+    MeanPool(k)
 
 Meanpooling layer. `k` stands for the size of the window for each dimension of the input.
 
 Takes the keyword arguments `pad` and `stride`.
 """
-struct Meanpool{N}
+struct MeanPool{N}
   k::NTuple{N,Int}
   pad::NTuple{N,Int}
   stride::NTuple{N,Int}
-  Meanpool(k::NTuple{N,Int}; pad = map(_->0,k), stride = k) where N = new{N}(k, pad, stride)
+  MeanPool(k::NTuple{N,Int}; pad = map(_->0,k), stride = k) where N = new{N}(k, pad, stride)
 end
 
-(m::Meanpool)(x) = meanpool(x, m.k; pad = m.pad, stride = m.stride)
+function MeanPool{N}(k::Int; pad = 0, stride = k) where N
+  k_ = Tuple(repeat([k, ], N))
+  MeanPool(k_; pad = map(_->pad,k_), stride=map(_->stride,k_))
+end
+
+(m::MeanPool)(x) = meanpool(x, m.k; pad = m.pad, stride = m.stride)
 
-function Base.show(io::IO, m::Meanpool)
-  print(io, "Meanpool(", m.k, ", ", m.pad, ", ", m.stride, ")")
+function Base.show(io::IO, m::MeanPool)
+  print(io, "MeanPool(", m.k, ", ", m.pad, ", ", m.stride, ")")
 end
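
For reference, a small sketch of how the added integer constructors are meant to be used (assuming Flux with this change and its NNlib-backed `maxpool` are loaded; the input sizes are only illustrative):

using Flux: MaxPool, MeanPool

# MaxPool{2}(2) expands the scalar window size to (2, 2),
# so it builds the same layer as MaxPool((2, 2)).
m1 = MaxPool{2}(2)
m2 = MaxPool((2, 2))

x = rand(6, 6, 1, 1)                     # W × H × C × N input
size(m1(x)) == size(m2(x)) == (3, 3, 1, 1)

# Keyword arguments are expanded over every dimension in the same way:
MeanPool{2}(2; pad = 1, stride = 3)      # same as MeanPool((2, 2); pad = (1, 1), stride = (3, 3))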

test/layers/conv.jl (new file)

using Test
using Flux: Chain, Conv, MaxPool, MeanPool, Dense, relu, softmax

@testset "pooling" begin
  mp = MaxPool((2, 2))

  @testset "maxpooling" begin
    # the scalar constructor should build the same layer as the tuple form
    @test MaxPool{2}(2) == mp
    @test MaxPool{2}(2; pad=1, stride=3) == MaxPool((2, 2); pad=(1, 1), stride=(3, 3))
  end

  mp = MeanPool((2, 2))

  @testset "meanpooling" begin
    @test MeanPool{2}(2) == mp
    @test MeanPool{2}(2; pad=1, stride=3) == MeanPool((2, 2); pad=(1, 1), stride=(3, 3))
  end
end

@testset "cnn" begin
  # a 28×28 single-channel image with batch size 1 (W × H × C × N layout)
  r = zeros(28, 28, 1, 1)
  m = Chain(
    Conv((2, 2), 1=>16, relu),
    MaxPool{2}(2),
    Conv((2, 2), 16=>8, relu),
    MaxPool{2}(2),
    # flatten to a features × batch matrix for the Dense layer
    x -> reshape(x, :, size(x, 4)),
    Dense(288, 10), softmax)

  @testset "inference" begin
    # one sample in, ten class scores out
    @test size(m(r)) == (10, 1)
  end
end
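
The `Dense(288, 10)` size in the `cnn` testset follows from the shape arithmetic below (a sketch, assuming stride-1 convolutions with no padding and Flux's W × H × C × N data layout):

# Each (2, 2) convolution shrinks width and height by 1;
# each 2×2 max pool halves them, rounding down.
#   28×28×1  --Conv((2,2), 1=>16)--> 27×27×16
#   27×27×16 --MaxPool 2×2 --------> 13×13×16
#   13×13×16 --Conv((2,2), 16=>8)--> 12×12×8
#   12×12×8  --MaxPool 2×2 --------> 6×6×8
# Flattening the final feature map gives the Dense input size:
@assert 6 * 6 * 8 == 288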