From 2adc3cd18ec4060b743c3da74c3cc3876dd4b945 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Tue, 14 Mar 2017 17:56:03 +0000
Subject: [PATCH] new struct syntax

---
 src/backend/mxnet/model.jl           | 10 +++++-----
 src/backend/mxnet/mxarray.jl         |  2 +-
 src/backend/tensorflow/model.jl      |  2 +-
 src/backend/tensorflow/recurrent.jl  |  2 +-
 src/backend/tensorflow/tensorflow.jl |  2 +-
 src/compiler/loops.jl                |  2 +-
 src/compiler/shape.jl                |  2 +-
 src/dims/batching.jl                 |  2 +-
 src/dims/catmat.jl                   |  2 +-
 src/dims/seq.jl                      |  2 +-
 src/layers/shape.jl                  |  6 +++---
 src/layers/shims.jl                  |  6 +++---
 src/model.jl                         |  4 ++--
 13 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/src/backend/mxnet/model.jl b/src/backend/mxnet/model.jl
index 9aff1ed8..62393d09 100644
--- a/src/backend/mxnet/model.jl
+++ b/src/backend/mxnet/model.jl
@@ -1,6 +1,6 @@
 using Flux: runrawbatched

-type AlterParam
+struct AlterParam
   param
   load
   store
@@ -15,7 +15,7 @@ function copyargs!(as, bs)
   end
 end

-type Graph
+struct Graph
   output
   params::Dict{Symbol,Any}
   stacks::Dict{Any,Any}
@@ -31,7 +31,7 @@ end

 ndparams(d::Dict{Symbol,MXArray}) = Dict(k => v.data for (k, v) in d)

-type Exec
+struct Exec
   graph::Graph
   exec::mx.Executor
   args::Dict{Symbol,MXArray}
@@ -84,7 +84,7 @@ end

 # TODO: if `last` changes, update params appropriately

-type Model <: Flux.Model
+mutable struct Model <: Flux.Model
   model::Any
   graph::Graph
   execs::Dict{Tuple,Exec}
@@ -119,7 +119,7 @@ Flux.update!(m::Model, η) = (update!(m.last, η); m)

 # MX FeedForward interface

-type SoftmaxOutput
+struct SoftmaxOutput
   name::Symbol
 end

diff --git a/src/backend/mxnet/mxarray.jl b/src/backend/mxnet/mxarray.jl
index 574ee1f0..67dd6503 100644
--- a/src/backend/mxnet/mxarray.jl
+++ b/src/backend/mxnet/mxarray.jl
@@ -5,7 +5,7 @@ using MXNet

 reversedims!(dest, xs) = permutedims!(dest, xs, ndims(xs):-1:1)

-immutable MXArray{N}
+struct MXArray{N}
   data::mx.NDArray
   scratch::Array{Float32,N}
 end
diff --git a/src/backend/tensorflow/model.jl b/src/backend/tensorflow/model.jl
index 77bd0e4f..dde4df4a 100644
--- a/src/backend/tensorflow/model.jl
+++ b/src/backend/tensorflow/model.jl
@@ -1,4 +1,4 @@
-type Model
+struct Model
   model::Any
   session::Session
   params::Dict{Flux.Param,Tensor}
diff --git a/src/backend/tensorflow/recurrent.jl b/src/backend/tensorflow/recurrent.jl
index 5abee520..f88e12d3 100644
--- a/src/backend/tensorflow/recurrent.jl
+++ b/src/backend/tensorflow/recurrent.jl
@@ -1,6 +1,6 @@
 # TODO: refactor, some of this is more general than just the TF backend

-type SeqModel
+struct SeqModel
   m::Model
   state::Any
 end
diff --git a/src/backend/tensorflow/tensorflow.jl b/src/backend/tensorflow/tensorflow.jl
index 5eff9278..67cd3843 100644
--- a/src/backend/tensorflow/tensorflow.jl
+++ b/src/backend/tensorflow/tensorflow.jl
@@ -5,7 +5,7 @@ import Flux: accuracy, rebatch, convertel

 export tf

-type Op
+struct Op
   f
   shape
 end
diff --git a/src/compiler/loops.jl b/src/compiler/loops.jl
index 462bf88f..8acf482e 100644
--- a/src/compiler/loops.jl
+++ b/src/compiler/loops.jl
@@ -1,6 +1,6 @@
 export unroll, unroll1

-type Offset
+struct Offset
   name::Symbol
   n::Int
   default::Nullable{Param}
diff --git a/src/compiler/shape.jl b/src/compiler/shape.jl
index 88c341e7..20973db3 100644
--- a/src/compiler/shape.jl
+++ b/src/compiler/shape.jl
@@ -2,7 +2,7 @@ using DataFlow.Interpreter

 export @shapes

-type Hint
+struct Hint
   typ
 end

diff --git a/src/dims/batching.jl b/src/dims/batching.jl
index f93a0207..d1a99e4d 100644
--- a/src/dims/batching.jl
+++ b/src/dims/batching.jl
@@ -1,6 +1,6 @@
 export Batch, batchone

-immutable Batch{T,S} <: AbstractVector{T}
+struct Batch{T,S} <: AbstractVector{T}
   data::CatMat{T,S}
 end

diff --git a/src/dims/catmat.jl b/src/dims/catmat.jl
index 17592921..258ae68f 100644
--- a/src/dims/catmat.jl
+++ b/src/dims/catmat.jl
@@ -2,7 +2,7 @@ import Base: eltype, size, getindex, setindex!, convert

 export CatMat, rawbatch

-immutable CatMat{T,S} <: AbstractVector{T}
+struct CatMat{T,S} <: AbstractVector{T}
   data::S
 end

diff --git a/src/dims/seq.jl b/src/dims/seq.jl
index 58ec76e3..fe4fe6cc 100644
--- a/src/dims/seq.jl
+++ b/src/dims/seq.jl
@@ -1,6 +1,6 @@
 export seq, Seq, BatchSeq

-immutable Seq{T,S} <: AbstractVector{T}
+struct Seq{T,S} <: AbstractVector{T}
   data::CatMat{T,S}
 end

diff --git a/src/layers/shape.jl b/src/layers/shape.jl
index 20c171cb..1c155ed6 100644
--- a/src/layers/shape.jl
+++ b/src/layers/shape.jl
@@ -11,12 +11,12 @@ single(i::Dims) = length(i) == 1 ? first(i) : i

 # Shim for kicking off shape inference

-type ShapeError <: Exception
+struct ShapeError <: Exception
   layer
   shape
 end

-type Input{N} <: Model
+struct Input{N} <: Model
   dims::Dims{N}
 end

@@ -27,7 +27,7 @@ back!(::Input, Δ, x) = Δ

 # Initialise placeholder

-type Init{F}
+struct Init{F}
   f::F
 end

diff --git a/src/layers/shims.jl b/src/layers/shims.jl
index c227278b..73e09a6e 100644
--- a/src/layers/shims.jl
+++ b/src/layers/shims.jl
@@ -1,6 +1,6 @@
 export Conv2D, MaxPool, AvgPool, Reshape

-type Conv2D <: Model
+struct Conv2D <: Model
   filter::Param{Array{Float64,4}} # [height, width, inchans, outchans]
   stride::Dims{2}
 end
@@ -16,7 +16,7 @@ shape(c::Conv2D, in::Dims{3}) =

 for Pool in :[MaxPool, AvgPool].args
   @eval begin
-    type $Pool <: Model
+    struct $Pool <: Model
       size::Dims{2}
       stride::Dims{2}
     end
@@ -34,7 +34,7 @@ for Pool in :[MaxPool, AvgPool].args
   end
 end

-immutable Reshape{N}
+struct Reshape{N}
   dims::Dims{N}
 end

diff --git a/src/model.jl b/src/model.jl
index 24b0424b..562ba314 100644
--- a/src/model.jl
+++ b/src/model.jl
@@ -54,7 +54,7 @@ A `Param` object stores a parameter array along with an accumulated delta to
 that array. When converting to backends like TensorFlow, identical `Param`s
 will result in identical variable objects, making model reuse trivial.
 """
-type Param{T}
+struct Param{T}
   x::T
   Δx::T
 end
@@ -107,7 +107,7 @@ Base.copy!(p::Param, xs) = copy!(p.x, xs)

 export Capacitor

-type Capacitor <: Model
+struct Capacitor <: Model
   graph::IVertex{Any}
 end
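
Note on the syntax change: Julia 0.6 replaces the 0.5 keywords `type` and
`immutable` with `mutable struct` and `struct`, which is the rewrite this
patch applies throughout. A minimal sketch of the difference, using
illustrative names (`Pt`, `Acc`) that are not part of this patch:

# `struct` declares an immutable type (old `immutable`): fields cannot be
# rebound after construction.
struct Pt
  x::Float64
end

# `mutable struct` declares a mutable type (old `type`): fields can be
# reassigned in place.
mutable struct Acc
  n::Int
end

a = Acc(0)
a.n += 1        # fine: Acc is mutable
p = Pt(1.0)
# p.x = 2.0     # would error: immutable struct fields cannot be reassigned

This is also why `Model` in src/backend/mxnet/model.jl is the one definition
translated to `mutable struct` rather than plain `struct`: judging from the
surrounding hunk (`Flux.update!` and the TODO about `last` changing), its
fields are reassigned after construction, which an immutable `struct` would
not allow.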