From 92e09e204d0684258f76aac92e509aa89935b6ec Mon Sep 17 00:00:00 2001
From: Adarsh Kumar <45385384+AdarshKumar712@users.noreply.github.com>
Date: Mon, 2 Mar 2020 20:33:12 +0530
Subject: [PATCH] =?UTF-8?q?Test=20argument=20consistency=20with=20=C5=B7?=
 =?UTF-8?q?=20and=20y?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 test/layers/stateless.jl | 33 +++++++++++++++++----------------
 1 file changed, 17 insertions(+), 16 deletions(-)

diff --git a/test/layers/stateless.jl b/test/layers/stateless.jl
index 702288b6..ce940bf9 100644
--- a/test/layers/stateless.jl
+++ b/test/layers/stateless.jl
@@ -22,9 +22,9 @@ const ϵ = 1e-7
   end
 
   y = [123.0,456.0,789.0]
-  y1 = [345.0,332.0,789.0]
+  ŷ = [345.0,332.0,789.0]
   @testset "msle" begin
-    @test Flux.msle(y1, y) ≈ 0.38813985859136585
+    @test Flux.msle(ŷ, y) ≈ 0.38813985859136585
   end
 
   # Now onehot y's
@@ -65,49 +65,50 @@ const ϵ = 1e-7
   end
 
   y = [1 2 3]
-  y1 = [4.0 5.0 6.0]
+  ŷ = [4.0 5.0 6.0]
   @testset "kldivergence" begin
-    @test Flux.kldivergence(y, y1) ≈ 4.761838062403337
+    @test Flux.kldivergence(ŷ, y) ≈ -1.7661057888493457
     @test Flux.kldivergence(y, y) ≈ 0
   end
 
   y = [1 2 3 4]
-  y1 = [5.0 6.0 7.0 8.0]
+  ŷ = [5.0 6.0 7.0 8.0]
   @testset "hinge" begin
-    @test Flux.hinge(y, y1) ≈ 0
+    @test Flux.hinge(ŷ, y) ≈ 0
     @test Flux.hinge(y, 0.5 .* y) ≈ 0.125
   end
 
   @testset "squared_hinge" begin
-    @test Flux.squared_hinge(y, y1) ≈ 0
+    @test Flux.squared_hinge(ŷ, y) ≈ 0
     @test Flux.squared_hinge(y, 0.5 .* y) ≈ 0.0625
   end
 
   y = [0.1 0.2 0.3]
-  y1 = [0.4 0.5 0.6]
+  ŷ = [0.4 0.5 0.6]
   @testset "poisson" begin
-    @test Flux.poisson(y, y1) ≈ 1.0160455586700767
+    @test Flux.poisson(ŷ, y) ≈ 0.6278353988097339
     @test Flux.poisson(y, y) ≈ 0.5044459776946685
   end
 
   y = [1.0 0.5 0.3 2.4]
-  y1 = [0 1.4 0.5 1.2]
+  ŷ = [0 1.4 0.5 1.2]
   @testset "dice_coeff_loss" begin
-    @test Flux.dice_coeff_loss(y, y1) ≈ 0.2799999999999999
-    @test Flux.dice_coeff_loss(y,y) ≈ 0.0
+    @test Flux.dice_coeff_loss(ŷ, y) ≈ 0.2799999999999999
+    @test Flux.dice_coeff_loss(y, y) ≈ 0.0
   end
 
   @testset "tversky_loss" begin
-    @test Flux.tversky_loss(y,y1) ≈ 0.028747433264887046
-    @test Flux.tversky_loss(y,y1,beta = 0.8) ≈ 0.050200803212851364
-    @test Flux.tversky_loss(y,y) ≈ -0.5576923076923075
+    @test Flux.tversky_loss(ŷ, y) ≈ -0.06772009029345383
+    @test Flux.tversky_loss(ŷ, y, β = 0.8) ≈ -0.09490740740740744
+    @test Flux.tversky_loss(y, y) ≈ -0.5576923076923075
   end
 
   @testset "no spurious promotions" begin
     for T in (Float32, Float64)
       y = rand(T, 2)
       ŷ = rand(T, 2)
-      for f in (mse, crossentropy, logitcrossentropy, Flux.kldivergence, Flux.hinge, Flux.poisson,Flux.mae,Flux.huber_loss,Flux.msle,Flux.squared_hinge,Flux.dice_coeff_loss,Flux.tversky_loss)
+      for f in (mse, crossentropy, logitcrossentropy, Flux.kldivergence, Flux.hinge, Flux.poisson,
+                Flux.mae, Flux.huber_loss, Flux.msle, Flux.squared_hinge, Flux.dice_coeff_loss, Flux.tversky_loss)
         fwd, back = Flux.pullback(f, ŷ, y)
         @test fwd isa T
         @test eltype(back(one(T))[1]) == T