diff --git a/src/Flux.jl b/src/Flux.jl
index 671581aa..801bc5ca 100644
--- a/src/Flux.jl
+++ b/src/Flux.jl
@@ -10,6 +10,11 @@ using DataFlow: graphm, syntax, prewalk!, postwalk!, prewalk, postwalk,
 using DataFlow.Interpreter
 using Juno: Tree, Row

+export @net, unroll, unroll1, @shapes,
+  Chain, @Chain, Input, Affine, Conv2D, Recurrent, GatedRecurrent, LSTM,
+  σ, relu, softmax,
+  tf, mxnet
+
 # Zero Flux Given

 include("utils.jl")
diff --git a/src/backend/backend.jl b/src/backend/backend.jl
index 6f8f9d33..23ea8629 100644
--- a/src/backend/backend.jl
+++ b/src/backend/backend.jl
@@ -1,5 +1,3 @@
-export tf, mxnet
-
 # We use a lazy-loading trick to load the backend code as needed; this avoids
 # the need for a hard dependency on both backends.

diff --git a/src/compiler/code.jl b/src/compiler/code.jl
index fb403897..1ef4af64 100644
--- a/src/compiler/code.jl
+++ b/src/compiler/code.jl
@@ -1,8 +1,6 @@
 import DataFlow: cse
 using MacroTools: @q

-export @net
-
 function graphdef(ex, params = [])
   @capture(shortdef(ex), (args__,) -> body_)
   body = @> body MacroTools.flatten liftloops graphm DataFlow.il
diff --git a/src/compiler/loops.jl b/src/compiler/loops.jl
index 2e025be8..aae9d700 100644
--- a/src/compiler/loops.jl
+++ b/src/compiler/loops.jl
@@ -1,5 +1,3 @@
-export unroll, unroll1
-
 # Stateful Models

 mutable struct Stateful
diff --git a/src/compiler/shape.jl b/src/compiler/shape.jl
index 414170af..1b7fb41f 100644
--- a/src/compiler/shape.jl
+++ b/src/compiler/shape.jl
@@ -1,5 +1,3 @@
-export @shapes
-
 Dims{N} = NTuple{N,Int}

 struct Hint
@@ -65,8 +63,6 @@ end

 # Shim for kicking off shape inference

-export Input
-
 struct Input{N}
   dims::Dims{N}
 end
diff --git a/src/layers/activation.jl b/src/layers/activation.jl
index 9955d1f4..d22ce7d7 100644
--- a/src/layers/activation.jl
+++ b/src/layers/activation.jl
@@ -1,5 +1,3 @@
-export σ, relu, softmax
-
 # Sigmoid
 σ(x) = 1 ./ (1 + exp.(-x))
 back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
diff --git a/src/layers/affine.jl b/src/layers/affine.jl
index d245971c..9608efcc 100644
--- a/src/layers/affine.jl
+++ b/src/layers/affine.jl
@@ -1,5 +1,3 @@
-export Affine
-
 @net type Affine
   W
   b
diff --git a/src/layers/control.jl b/src/layers/control.jl
index ec12cc3c..7851f902 100644
--- a/src/layers/control.jl
+++ b/src/layers/control.jl
@@ -1,5 +1,3 @@
-export Chain, @Chain
-
 type Chain
   layers::Vector{Any}
   Chain(xs...) = new([xs...])
diff --git a/src/layers/recurrent.jl b/src/layers/recurrent.jl
index 3874287b..883addf7 100644
--- a/src/layers/recurrent.jl
+++ b/src/layers/recurrent.jl
@@ -1,5 +1,3 @@
-export Recurrent, GatedRecurrent, LSTM
-
 @net type Recurrent
   Wxy; Wyy; by
   y
diff --git a/src/layers/shims.jl b/src/layers/shims.jl
index 26c20f0c..b1d47fcd 100644
--- a/src/layers/shims.jl
+++ b/src/layers/shims.jl
@@ -1,5 +1,3 @@
-export Conv2D
-
 struct Conv2D
   filter::Param{Array{Float64,4}} # [height, width, inchans, outchans]
   stride::Dims{2}
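
For context: this patch consolidates the exports previously scattered across the individual source files into the single export block in src/Flux.jl; the user-facing names are unchanged. Below is a minimal sketch of that surface after `using Flux`. The `Affine(in, out)` constructor arguments, the callable-`Chain` style, and the commented backend call are assumptions about the API at this commit, not taken from the diff itself:

using Flux

# Every public name now resolves through the one export list in src/Flux.jl:
m = Chain(
  Affine(10, 20),   # affine layer, assumed 10 inputs -> 20 outputs
  σ,                # exported sigmoid activation
  Affine(20, 5),
  softmax)

y = m(rand(10))     # forward pass through the chain (assumed callable style)

# The backend converters are exported from the same list rather than from
# src/backend/backend.jl:
# mx = mxnet(m)     # hypothetical call shape; exact signature may differ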