From 385dee9d16e29bb37cc4083ffd5bad736a70f520 Mon Sep 17 00:00:00 2001
From: baggepinnen
Date: Fri, 8 Dec 2017 14:46:12 +0100
Subject: [PATCH] Add jacobian function

---
 src/Flux.jl   |  2 +-
 src/utils.jl  | 19 +++++++++++++++++++
 test/utils.jl |  9 +++++++++
 3 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/src/Flux.jl b/src/Flux.jl
index df4b1636..2c79e426 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -9,7 +9,7 @@ using Lazy: @forward
 export Chain, Dense, RNN, LSTM,
   Dropout, LayerNorm,
   SGD, ADAM, Momentum, Nesterov,
-  param, params, mapleaves
+  param, params, mapleaves, jacobian
 
 using NNlib
 export σ, relu, leakyrelu, elu, swish, softmax
diff --git a/src/utils.jl b/src/utils.jl
index f822c111..755b54e9 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -120,3 +120,22 @@ function throttle(f, timeout; leading=true, trailing=false)
     nothing
   end
 end
+
+"""
+    J = jacobian(m, x)
+
+Calculate the output Jacobian `J = d/dx m(x)` such that each row `i` of `J` is the gradient `J[i,:] = ∇ₓ(m(x)[i])`.
+"""
+function jacobian(m, x)
+  xp = param(x)
+  y = m(xp)
+  k = length(y)
+  n = length(x)
+  J = Matrix{eltype(x)}(n, k)
+  for i = 1:k
+    Flux.back!(y[i]) # Populate gradient accumulator
+    J[:, i] = xp.grad
+    xp.grad .= 0 # Reset gradient accumulator
+  end
+  J'
+end
diff --git a/test/utils.jl b/test/utils.jl
index 7638fd2a..abee0f24 100644
--- a/test/utils.jl
+++ b/test/utils.jl
@@ -47,3 +47,12 @@ using Flux: throttle
     @test a == [1, 3]
   end
 end
+
+@testset "Jacobian" begin
+  A = param(randn(2, 2))
+  x = randn(2)
+  m(x) = A*x
+  y = m(x)
+  J = jacobian(m, x)
+  @test J ≈ A.data
+end
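
A minimal usage sketch for reviewers, beyond the linear-map test above: the `Dense` layer, activation, and sizes are arbitrary illustrative choices, assuming the tracker-based `param`/`back!` API this patch targets.

    using Flux

    m = Dense(3, 2, σ)  # toy nonlinear model mapping R^3 -> R^2
    x = randn(3)
    J = jacobian(m, x)  # 2×3 matrix; J[i, :] is the gradient of m(x)[i] w.r.t. x
    @assert size(J) == (2, 3)

Note that each call runs one backward pass per output element, so the cost scales with `length(m(x))`.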