From 6b4bbd4fce55820a28bb24b7b63d0a7f5ecbf65f Mon Sep 17 00:00:00 2001
From: Tejan Karmali
Date: Wed, 10 Oct 2018 10:29:15 -0400
Subject: [PATCH] reverted back the weights changes in rnndesc

---
 src/cuda/cudnn.jl | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/cuda/cudnn.jl b/src/cuda/cudnn.jl
index 86f673bc..f314cbef 100644
--- a/src/cuda/cudnn.jl
+++ b/src/cuda/cudnn.jl
@@ -87,11 +87,8 @@ function RNNDesc{T}(mode::Int, input::Int, hidden::Int; layers = 1) where T
     libcudnn_handle[],d[],hidden,layers,dropoutDesc,inputMode,direction,mode,algo,cudnnDataType(T))
 
   w = cuzeros(T, rnnParamSize(T, d[], input))
-  (wx, wh), bias = params(w, input, hidden, ngates(mode))
-  w_ = vcat(wx[:], wh[:], bias)
-  w[1:length(w_)] .= w_
   # TODO: avoid reserve allocation here
-  rd = RNNDesc{T}(mode, input, hidden, w, (wx, wh), bias, d[])
+  rd = RNNDesc{T}(mode, input, hidden, w, params(w, input, hidden, ngates(mode))..., d[])
   finalizer(rd) do x
     @check ccall((:cudnnDestroyRNNDescriptor,libcudnn),cudnnStatus_t,(Ptr{Nothing},),x)
   end
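
For context on the one-line constructor call reintroduced above: judging from the removed lines, params(w, input, hidden, ngates(mode)) returns ((wx, wh), bias), so splatting it into RNNDesc{T} passes the same positional arguments as the explicit destructuring did. Below is a minimal, self-contained sketch of that splat behaviour only; make and parts are hypothetical stand-ins for the real constructor and the CUDNN-backed parameter views, and the array shapes are illustrative.

  # Sketch: `parts` stands in for the ((wx, wh), bias) tuple returned by params,
  # and `make` stands in for the RNNDesc{T} constructor.
  make(args...) = args

  parts = ((rand(2, 3), rand(3, 3)), rand(3))        # ((wx, wh), bias)

  (wx, wh), bias = parts
  a = make(:mode, 2, 3, :w, (wx, wh), bias, :d)      # explicit form (removed lines)
  b = make(:mode, 2, 3, :w, parts..., :d)            # splatted form (added line)

  @assert a == b   # both calls receive the same positional arguments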