rm gradient checks

Mike Innes 2019-09-10 15:35:52 +01:00
parent 221313c977
commit 877415be10
2 changed files with 0 additions and 37 deletions

@@ -1,33 +0,0 @@
using Flux, Test

function ngradient(f, xs::AbstractArray...)
  grads = zero.(xs)
  for (x, Δ) in zip(xs, grads), i in 1:length(x)
    δ = sqrt(eps())
    tmp = x[i]
    x[i] = tmp - δ/2
    y1 = f(xs...)
    x[i] = tmp + δ/2
    y2 = f(xs...)
    x[i] = tmp
    Δ[i] = (y2-y1)/δ
  end
  return grads
end

gradcheck(f, xs...) =
  all(isapprox.(ngradient(f, xs...),
                gradient(f, xs...), rtol = 1e-5, atol = 1e-5))

gradtest(f, xs::AbstractArray...) = gradcheck((xs...) -> sum(sin.(f(xs...))), xs...)
gradtest(f, dims...) = gradtest(f, rand.(Float64, dims)...)

@testset "Zygote" begin
  @test gradtest(Flux.mse, rand(5,5), rand(5, 5))
  @test gradtest(Flux.crossentropy, rand(5,5), rand(5, 5))
  # @test gradtest(x -> Flux.normalise(x), rand(4,3))
  # @test gradtest(x -> Flux.normalise(x, dims = 2), rand(3,4))
end
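
For reference, the deleted `ngradient` estimates each partial derivative with a central difference, (f(x + δ/2) - f(x - δ/2)) / δ, and `gradcheck` compares that estimate against Zygote's `gradient`. A minimal sketch of exercising these helpers on a toy function, assuming the definitions above are in scope; the function `f` and matrix `W` below are made up for illustration:

  using Flux  # brings Zygote's `gradient` into scope

  # Toy scalar-valued function of a matrix (made up for illustration).
  f(W) = sum(sin.(W * ones(3)))
  W = rand(2, 3)

  ngradient(f, W)  # finite-difference estimate, a 1-tuple holding ∂f/∂W
  gradient(f, W)   # Zygote's analytic gradient, same shape
  gradcheck(f, W)  # true when the two agree to the given tolerances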

@@ -19,10 +19,6 @@ include("layers/normalisation.jl")
include("layers/stateless.jl")
include("layers/conv.jl")
@info "Running Gradient Checks"
include("gradients.jl")
if isdefined(Flux, :CUDA)
include("cuda/cuda.jl")
else