using Flux, Test
using Flux.CuArrays
using Flux: gpu
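# GPU smoke tests: move data and models to the GPU with `gpu`/`cu` and check that
# basic Flux functionality (one-hot encoding, losses, layers, gradients) returns
# CuArrays and agrees with the CPU results.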
@info "Testing GPU Support"
@testset "CuArrays" begin
# Disallow scalar indexing of CuArrays so that any accidental element-by-element
# fallback to the CPU raises an error instead of silently running slowly.
CuArrays.allowscalar(false)
x = randn(5, 5)
cx = gpu(x)
@test cx isa CuArray
@test Flux.onecold(gpu([1.0, 2.0, 3.0])) == 3
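# `onecold` is the inverse of one-hot encoding: it returns the index of the
# largest entry (3 here) and, as tested above, works on arrays living on the GPU.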
x = Flux.onehotbatch([1, 2, 3], 1:3)
cx = gpu(x)
@test cx isa Flux.OneHotMatrix && cx.data isa CuArray
# Broadcasting the one-hot matrix with a scalar materialises a dense CuArray.
@test (cx .+ 1) isa CuArray
m = Chain(Dense(10, 5, tanh), Dense(5, 2), softmax)
cm = gpu(m)
@test all(p isa CuArray for p in params(cm))
@test cm(gpu(rand(10, 10))) isa CuArray{Float32,2}
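# For illustration (not part of the tests): `gpu` moves data onto the device and
# `cpu` brings results back, so the same Chain can be used from either side, e.g.
#   ŷ = cm(gpu(rand(10, 10)))   # CuArray result on the GPU
#   cpu(ŷ)                      # plain Array back on the CPU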
x = [1.0, 2.0, 3.0]
cx = gpu(x)
@test Flux.crossentropy(x,x) ≈ Flux.crossentropy(cx,cx)
@test Flux.crossentropy(x,x, weight=1.0) ≈ Flux.crossentropy(cx,cx, weight=1.0)
@test Flux.crossentropy(x,x, weight=[1.0;2.0;3.0]) ≈ Flux.crossentropy(cx,cx, weight=cu([1.0;2.0;3.0]))
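# Note on the tests above: `weight` scales the per-class loss terms, so when the
# predictions live on the GPU the weight vector is moved there as well (hence
# `cu([1.0; 2.0; 3.0])`); a scalar weight works on either device.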
x = [-1.1491, 0.8619, 0.3127]
y = [1.0, 1.0, 0.0]
@test Flux.binarycrossentropy.(σ.(x),y) ≈ Array(Flux.binarycrossentropy.(cu(σ.(x)),cu(y)))
@test Flux.logitbinarycrossentropy.(x,y) ≈ Array(Flux.logitbinarycrossentropy.(cu(x),cu(y)))
xs = rand(5, 5)
ys = Flux.onehotbatch(1:5,1:5)
@test collect(cu(xs) .+ cu(ys)) ≈ collect(xs .+ ys)
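# One-hot batches and ordinary arrays moved to the GPU broadcast together;
# `collect` copies both results back so the CPU and GPU sums can be compared.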
c = gpu(Conv((2,2),3=>4))
x = gpu(rand(10, 10, 3, 2))
l = c(x)   # forward pass runs on the GPU
@test gradient(x -> sum(c(x)), x)[1] isa CuArray
c = gpu(CrossCor((2,2),3=>4))
x = gpu(rand(10, 10, 3, 2))
l = c(x)   # forward pass runs on the GPU
@test gradient(x -> sum(c(x)), x)[1] isa CuArray
end
@testset "onecold gpu" begin
y = Flux.onehotbatch(ones(3), 1:10) |> gpu;
@test Flux.onecold(y) isa CuArray
@test y[3,:] isa CuArray
end
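# For illustration (not part of the tests): `onecold` of a GPU one-hot batch
# returns GPU-resident indices; copy them back with `cpu` (or `Array`) when
# plain integer indices are needed, e.g. `Flux.onecold(y) |> cpu`.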
@testset "restructure gpu" begin
dudt = Dense(1,1) |> gpu
p,re = Flux.destructure(dudt)
foo(x) = sum(re(p)(x))
@test gradient(foo, cu(rand(1)))[1] isa CuArray
end
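# For illustration (not part of the tests): `destructure` flattens a model's
# parameters into a single vector and returns a closure `re` that rebuilds an
# equivalent model from any vector of the same length; that is why the gradient
# above can flow through `re(p)`, e.g.
#   θ, re = Flux.destructure(Dense(1, 1) |> gpu)
#   re(θ)(cu(rand(1)))   # same result as calling the original layer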
if CuArrays.has_cudnn()
@info "Testing Flux/CUDNN"
include("cudnn.jl")
include("curnn.jl")
include("layers.jl")
else
@warn "CUDNN unavailable, not testing GPU DNN support"
end