diff --git a/src/layers/activation.jl b/src/layers/activation.jl
index 7d0d27ad..5b1a27e2 100644
--- a/src/layers/activation.jl
+++ b/src/layers/activation.jl
@@ -1,10 +1,10 @@
 # Sigmoid
 σ(x) = 1 ./ (1 + exp.(-x))
-back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
+# back!(::typeof(σ), Δ, x) = Δ .* σ(x).*(1.-σ(x))
 
 # Rectified Linear Unit
 relu(x) = max(0, x)
-back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
+# back!(::typeof(relu), Δ, x) = Δ .* (x .> 0)
 
 softmax(xs) = exp.(xs) ./ sum(exp.(xs), 2)
diff --git a/src/layers/cost.jl b/src/layers/cost.jl
index 009e37cd..99c9381a 100644
--- a/src/layers/cost.jl
+++ b/src/layers/cost.jl
@@ -1,7 +1,7 @@
 mse(ŷ, y) = sumabs2(ŷ .- y)/2
-back!(::typeof(mse), Δ, ŷ, y) = Δ .* (ŷ .- y)
+# back!(::typeof(mse), Δ, ŷ, y) = Δ .* (ŷ .- y)
 
 logloss(ŷ, y) = -sum(y .* log.(ŷ))
-back!(::typeof(logloss), Δ, ŷ, y) = 0 .- Δ .* y ./ ŷ
+# back!(::typeof(logloss), Δ, ŷ, y) = 0 .- Δ .* y ./ ŷ
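
For reference, the disabled back! rules are the standard analytic gradients of the forward definitions above (with Δ the incoming sensitivity). A minimal sanity check against finite differences, written as a hypothetical standalone script, not part of the patch; it uses current Julia syntax (the patch itself predates 1.0, e.g. sumabs2 and sum(xs, 2)) and scalar versions of the functions for brevity:

# Forward definitions, scalar form.
σ(x) = 1 / (1 + exp(-x))
relu(x) = max(0, x)
mse(ŷ, y) = abs2(ŷ - y) / 2        # scalar form of sumabs2(ŷ .- y)/2
logloss(ŷ, y) = -y * log(ŷ)        # scalar form of -sum(y .* log.(ŷ))

# Analytic gradients w.r.t. the first argument with Δ = 1,
# matching the commented-out back! methods.
dσ(x) = σ(x) * (1 - σ(x))
drelu(x) = float(x > 0)
dmse(ŷ, y) = ŷ - y
dlogloss(ŷ, y) = -y / ŷ

# Central finite difference for comparison.
fd(f, x; h = 1e-6) = (f(x + h) - f(x - h)) / 2h

@assert isapprox(dσ(0.3), fd(σ, 0.3); atol = 1e-6)
@assert isapprox(drelu(0.3), fd(relu, 0.3); atol = 1e-6)
@assert isapprox(dmse(0.7, 1.0), fd(ŷ -> mse(ŷ, 1.0), 0.7); atol = 1e-6)
@assert isapprox(dlogloss(0.7, 1.0), fd(ŷ -> logloss(ŷ, 1.0), 0.7); atol = 1e-6)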