Final changes

This commit is contained in:
ayush1999 2018-08-11 20:55:14 +05:30 committed by Josh Christie
parent 69ccaf044f
commit 4683e925d4
2 changed files with 3 additions and 4 deletions

View File

@@ -1,6 +1,6 @@
using NNlib: conv
@generated sub2(::Type{Val{N}}) where N = :(Val{$(N-2)})
@generated sub2(::Type{Val{N}}) where N = :(Val($(N-2)))
expand(N, i::Tuple) = i
expand(N, i::Integer) = ntuple(_ -> i, N)
@@ -32,7 +32,7 @@ Conv(w::AbstractArray{T,N}, b::AbstractVector{T}, σ = identity;
Conv(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ = identity; init = initn,
stride = 1, pad = 0, dilation = 1) where N =
Conv(param(init(k..., ch...)), param(zero(ch[2])), σ,
Conv(param(init(k..., ch...)), param(zeros(ch[2])), σ,
stride = stride, pad = pad, dilation = dilation)
@treelike Conv

View File

@@ -77,8 +77,7 @@ include("numeric.jl")
Hook into gradient backpropagation. `x` is unmodified, but when backpropagating
`f` will be applied to the incoming gradient. For example, `hook(-, x)` will reverse
the sign of the gradient applied to `x`.
"""
the sign of the gradient applied to `x`."""
hook(f, x) = istracked(x) ? track(hook, f, x) : x
@grad hook(f, x) = data(x), Δ -> (nothing, f(Δ))