Merge branch 'master' into tf-train
commit cd9521a762
@@ -23,7 +23,7 @@ include("core.jl")
import .FluxCore: back!, update!, graph

include("utils.jl")

include("ops.jl")
include("params.jl")

include("compiler/code.jl")
@@ -31,6 +31,15 @@ graph(::typeof(svd), x) = svd(x)
graph(::typeof(size), x, dim) = TensorFlow.size(x, convert(Tensor{Int32}, dim))
graph(::typeof(size), x) = TensorFlow.size(x)
graph(::typeof(chol), args...) = TensorFlow.transpose(TensorFlow.cholesky(args...))
graph(::typeof(reshape), x, dims) = TensorFlow.reshape(x, convert(Tensor{Int32}, dims))
graph(::typeof(Flux.tile), args...) = TensorFlow.tile(args...)
graph(::typeof(fill), x, dims) = Ops.fill(convert(Tensor{Int32}, dims), Tensor(x))
graph(::typeof(Flux.cast), args...) = TensorFlow.cast(args...)
graph(::typeof(solve), A, b) = TensorFlow.matrix_solve(A, b)
graph(::typeof(triangular_solve), A, b) = TensorFlow.matrix_triangular_solve(A, b; lower=false)
graph(::typeof(randu), x) = Ops.random_uniform(convert(Tensor{Int32}, x); dtype=Float32)
graph(::typeof(randn), x) = TensorFlow.random_normal(convert(Tensor{Int32}, x); dtype=Float32)
graph(::typeof(Flux.expand_dims), x, dim) = TensorFlow.expand_dims(x, convert(Tensor{Int32}, dim))

for op in (*, .*, .+, .^, log, exp, ceil, floor, sqrt, abs, cos,
           sin, tan, atan, asin, acos, tanh, lgamma, erf, erfc, real, imag, conj,
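The `graph(::typeof(f), args...)` overloads above are the translation table from Julia functions to TensorFlow ops: dispatching on the function object itself picks the right backend call. A minimal sketch of that pattern, using toy string "ops" so it runs without TensorFlow (the name `totf` is illustrative, not part of this diff):

    # Fallback: no known translation for f.
    totf(f, args...) = error("no graph translation for $f")

    # One method per translatable function, dispatching on typeof(f):
    totf(::typeof(+), x, y)          = "Add($x, $y)"
    totf(::typeof(reshape), x, dims) = "Reshape($x, $dims)"

    totf(+, :a, :b)            # => "Add(a, b)"
    totf(reshape, :a, [2, 3])  # => "Reshape(a, [2, 3])"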
@@ -1,4 +1,4 @@
-using Flux: mapt, collectt, shapecheckt
+using Flux: Param, mapt, collectt, shapecheckt

struct Exec
  session ::Session
@@ -1,7 +1,7 @@
module TF

using ..Flux, DataFlow, TensorFlow, Juno
-import Flux: accuracy, convertel, Param
+import Flux: accuracy, convertel

export tf
src/ops.jl (new file, 18 lines)
@@ -0,0 +1,18 @@
export reshape, tile, fill, cast, solve, triangular_solve, randu, randn,
  expand_dims

import Base: reshape, fill, randn

reshape(x::AbstractArray, dims::AbstractArray) = reshape(x, tuple(dims...))
tile(x::AbstractArray, mult::AbstractArray) = repeat(x, outer=tuple(mult...))
fill{T}(x::T, dims::AbstractArray) = fill(x, tuple(dims...))
cast{T}(x::AbstractArray, ::Type{T}) = convert(Array{T}, x)
solve(A::AbstractArray, b::AbstractArray) = A\b
triangular_solve(A::AbstractArray, b::AbstractArray) = A\b
randu(x::AbstractArray) = rand(tuple(x...))
randn(x::AbstractArray) = randn(tuple(x...))

function expand_dims(x, dim)
  s = [size(x)...]
  reshape(x, tuple(vcat(s[1:dim-1], 1, s[dim:end])...))
end
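For a concrete sense of `expand_dims` above, a quick plain-Julia check (no TensorFlow involved):

    x = [1 2; 3 4]            # 2×2 matrix
    size(expand_dims(x, 1))   # (1, 2, 2): singleton axis inserted in front
    size(expand_dims(x, 3))   # (2, 2, 1): singleton axis appended at the end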
@@ -25,18 +25,59 @@ macro cb(ex, t, f)
  end)
end

"""
|
||||
Returns a function that when invoked, will only be triggered at most once
|
||||
during `timeout` seconds. Normally, the throttled function will run
|
||||
as much as it can, without ever going more than once per `wait` duration;
|
||||
but if you'd like to disable the execution on the leading edge, pass
|
||||
`leading=false`. To enable execution on the trailing edge, ditto.
|
||||
"""
function throttle(f, timeout; leading=true, trailing=false)
  cooldown = true
  later = nothing

  function throttled(args...; kwargs...)
    yield()

    if cooldown
      if leading
        f(args...; kwargs...)
      else
        later = () -> f(args...; kwargs...)
      end

      cooldown = false
      @schedule try
        while (sleep(timeout); later != nothing)
          later()
          later = nothing
        end
      finally
        cooldown = true
      end
    elseif trailing
      later = () -> f(args...; kwargs...)
    end

    nothing
  end
end
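A usage sketch for `throttle` (the logging closure here is illustrative): the wrapped function fires on the first call, then at most once per `timeout` seconds no matter how often it is invoked:

    log_status = throttle(1) do
      println("still running...")
    end

    for _ in 1:10_000
      log_status()  # prints immediately, then at most once per second
    end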
function train!(m, train; cb = [],
                epoch = 1, η = 0.1, loss = mse)
  callback = throttle(()->foreach(f -> f(), cb), 5)

  @progress for e in 1:epoch
    info("Epoch $e")
-   @cb for (x, y) in train
+   for (x, y) in train
      x, y = mapt(tobatch, (x, y))
      ŷ = m(x)
      any(isnan, ŷ) && error("NaN")
      Δ = back!(loss, 1, ŷ, y)
      back!(m, Δ, x)
      update!(m, η)
-   end 5 foreach(f -> f(), cb)
+     callback()
+   end
  end
  return m
end
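To see how the new `callback` plumbing behaves: the `cb` functions are folded into a single throttled closure, so however fast the batch loop spins, each callback runs at most once every 5 seconds. A usage sketch (the model `m`, the `data` iterator, and `report` are placeholders, not part of this commit):

    report() = println("epoch checkpoint")  # hypothetical monitoring hook
    train!(m, data; epoch = 5, η = 0.01, cb = [report])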
@@ -47,6 +47,24 @@ end
A = randn(6,5)
A = A'*A
@test tf(@net x -> chol(x))(A) ≈ chol(A)
A = randn(Float32,(6,3))
@test transpose(tf(@net (x,y) -> reshape(x,y))(transpose(A),[2,9])) ≈ reshape(A,(9,2)) # Note: TF is row-major and Julia is not
A = randn(Float32,(4,3,1))
@test tf(@net (x,y) -> Flux.tile(x,y))(A,[1,1,3]) ≈ repeat(A,outer=(1,1,3))
@test tf(@net (x,y) -> fill(x,y))(3.2,[3,2]) ≈ convert(Array{Float32},3.2*ones(3,2))
@test typeof(tf(@net x -> Flux.cast(x,Int32))(A)) == Array{Int32,3}
A = randn(Float32,(5,5))
b = randn(Float32,(5,1))
@test tf(@net (x,y) -> solve(x,y))(A,b) ≈ A\b
_,A,_ = lu(A)
@test tf(@net (x,y) -> triangular_solve(x,y))(A,b) ≈ A\b
@test size(tf(@net x -> randu(x))([2,3])) == (2,3)
@test size(tf(@net x -> randn(x))([2,3])) == (2,3)
m = tf(@net (x,y) -> Flux.expand_dims(x,y))
A = randn(Float32,(3,2))
@test m(A,1) ≈ Flux.expand_dims(A,1)
@test m(A,2) ≈ Flux.expand_dims(A,2)
@test m(A,3) ≈ Flux.expand_dims(A,3)
end

end
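The transposes in the `reshape` test above compensate for layout: TensorFlow reshapes row-major while Julia stores arrays column-major. A pure-Julia illustration of the difference:

    A = Float32[1 2 3; 4 5 6]
    # Column-major (Julia's native reshape):
    reshape(A, (3, 2))                        # [1 5; 4 3; 2 6]
    # Row-major (TensorFlow semantics), emulated with transposes:
    transpose(reshape(transpose(A), (2, 3)))  # [1 2; 3 4; 5 6]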
@@ -18,6 +18,7 @@ include("backend/common.jl")
include("basic.jl")
include("recurrent.jl")
include("optimizer.jl")
+include("throttle.jl")

@tfonly include("backend/tensorflow.jl")
@mxonly include("backend/mxnet.jl")
49
test/throttle.jl
Normal file
49
test/throttle.jl
Normal file
@ -0,0 +1,49 @@
|
||||
using Flux.throttle

@testset "throttle" begin
  @testset "default behaviour" begin
    a = []
    f = throttle(()->push!(a, now()), 1, leading=true, trailing=false)
    f()
    f()
    f()
    sleep(1.01)
    @test length(a) == 1
  end

  @testset "leading behaviour" begin
    a = []
    f = throttle(()->push!(a, now()), 1, leading=true, trailing=false)
    f()
    @test length(a) == 1
    f()
    @test length(a) == 1
    sleep(1.01)
    f()
    @test length(a) == 2
  end

  @testset "trailing behaviour" begin
    a = []
    f = throttle(()->push!(a, now()), 1, leading=false, trailing=true)
    f()
    @test length(a) == 0
    f()
    @test length(a) == 0
    sleep(1.01)
    @test length(a) == 1
  end

  @testset "arguments" begin
    a = []
    f = throttle((x)->push!(a, x), 1, leading=true, trailing=true)
    f(1)
    @test a == [1]
    f(2)
    @test a == [1]
    f(3)
    @test a == [1]
    sleep(1.01)
    @test a == [1, 3]
  end
end