From 633f0df01fc0e200e4a03cb7f3e93f868a7d1b72 Mon Sep 17 00:00:00 2001
From: Manjunath Bhat
Date: Tue, 12 Mar 2019 02:31:42 +0530
Subject: [PATCH] Added new loss functions.

---
 src/layers/stateless.jl | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/src/layers/stateless.jl b/src/layers/stateless.jl
index 23fd1651..3bb48f1f 100644
--- a/src/layers/stateless.jl
+++ b/src/layers/stateless.jl
@@ -54,3 +54,42 @@ function normalise(x::AbstractArray, dims)
   Base.depwarn("`normalise(x::AbstractArray, dims)` is deprecated, use `normalise(a, dims=dims)` instead.", :normalise)
   normalise(x, dims = dims)
 end
+
+"""
+    KLDivergence(ŷ, y)
+
+Kullback-Leibler divergence is a measure of how much one probability
+distribution differs from another. It is always non-negative, and zero
+only when both distributions are equal everywhere.
+"""
+function KLDivergence(ŷ, y)
+  # `sum(y .* log.(y))` is the negative entropy of `y`; adding the
+  # cross entropy of `ŷ` against `y` yields the KL divergence.
+  entropy = sum(y .* log.(y)) * 1 // size(y, 2)
+  cross_entropy = crossentropy(ŷ, y)
+  return entropy + cross_entropy
+end
+
+"""
+    Poisson(ŷ, y)
+
+Poisson loss measures how the predicted distribution diverges from the
+expected distribution. It is suited to targets that represent counts.
+"""
+Poisson(ŷ, y) = sum(ŷ .- y .* log.(ŷ)) * 1 // size(y, 2)
+
+"""
+    logcosh(ŷ, y)
+
+Log-cosh loss: the logarithm of the hyperbolic cosine of the prediction
+error. It behaves like mean squared error for small errors and like mean
+absolute error for large ones, making it robust to outliers.
+"""
+logcosh(ŷ, y) = sum(log.(cosh.(ŷ .- y)))
+
+"""
+    Hinge(ŷ, y)
+
+Hinge loss for maximum-margin classification, with targets `y` in {-1, 1}.
+"""
+Hinge(ŷ, y) = sum(max.(0, 1 .- ŷ .* y)) * 1 // size(y, 2)
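
Note: a minimal sanity check for the new losses, assuming the patch above is applied to Flux (where `crossentropy` is already defined). The functions are not exported by this patch, so they are qualified with `Flux.`; the distributions below are made up for illustration.

    using Flux

    # Column-wise probability distributions: each column sums to 1.
    y = [0.1 0.4; 0.9 0.6]  # targets
    ŷ = [0.2 0.3; 0.8 0.7]  # predictions

    # KLDivergence should match the textbook form sum(y .* log.(y ./ ŷ)),
    # averaged over the size(y, 2) batch columns.
    kl_direct = sum(y .* log.(y ./ ŷ)) / size(y, 2)
    @assert Flux.KLDivergence(ŷ, y) ≈ kl_direct

    # Hinge loss vanishes once every margin ŷ * y reaches 1 (targets are ±1).
    @assert Flux.Hinge([2.0 -3.0], [1.0 -1.0]) == 0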
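
And a sketch of one of the losses driving a training step, using the Flux API current at the time of this patch (`params`, `train!`, `ADAM`); the model and data here are hypothetical.

    using Flux

    m = Dense(10, 1)                    # hypothetical regression model
    x, y = rand(10, 5), rand(1, 5)      # made-up batch of 5 samples
    loss(x, y) = Flux.logcosh(m(x), y)  # log-cosh as the objective
    Flux.train!(loss, Flux.params(m), [(x, y)], ADAM())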