Added new loss functions.

Manjunath Bhat 2019-03-12 02:31:42 +05:30 committed by GitHub
parent 22cb732657
commit 633f0df01f

@@ -54,3 +54,31 @@ function normalise(x::AbstractArray, dims)
Base.depwarn("`normalise(x::AbstractArray, dims)` is deprecated, use `normalise(a, dims=dims)` instead.", :normalise)
normalise(x, dims = dims)
end
"""
Kullback-Leibler Divergence (KL Divergence)
KLDivergence is a measure of how much one probability distribution differs from another.
It is always non-negative, and zero only when the two distributions are equal everywhere.
"""
function KLDivergence(ŷ, y)
  entropy = sum(y .* log.(y)) * 1 // size(y, 2)
  cross_entropy = crossentropy(ŷ, y)
  return entropy + cross_entropy
end
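# Hypothetical usage sketch, not part of this commit: each column of `ŷ` and `y`
# is treated as a probability distribution and the divergence is averaged over
# the batch (columns). Variable names below are illustrative only.
y_dist = [0.1 0.3; 0.9 0.7]    # target distributions, one per column
ŷ_dist = [0.2 0.4; 0.8 0.6]    # predicted distributions, columns sum to 1
KLDivergence(ŷ_dist, y_dist)   # ≥ 0, and exactly 0 when ŷ_dist == y_dist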
"""
Poisson Loss function
Poisson loss function is a measure of how the predicted distribution diverges from the expected distribution.
"""
Poisson(ŷ, y) = sum(ŷ .- y .* log.(ŷ)) * 1 // size(y, 2)
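# Hypothetical sketch, not part of this commit: `ŷ` holds strictly positive
# predicted rates and `y` the observed counts; the loss is averaged over columns.
rates  = [0.5 2.0; 1.5 3.0]
counts = [1.0 2.0; 2.0 3.0]
Poisson(rates, counts)         # sum(ŷ .- y .* log.(ŷ)) * 1 // size(y, 2)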
"""
Logcosh Loss function
"""
logcosh(ŷ, y) = sum(log.(cosh.(ŷ .- y)))
Hinge(ŷ, y) = sum(max.(0.0, 1 .- ŷ .* y)) * 1 // size(y, 2)
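# Hypothetical sketch, not part of this commit: logcosh behaves like a squared
# error for small residuals and like an absolute error for large ones, while
# Hinge expects binary targets encoded as +1 / -1 and averages over columns.
preds   = [0.8 -0.3; 0.2 -0.9]
targets = [1.0 -1.0; 1.0 -1.0]
logcosh(preds, targets)        # sum of log(cosh(ŷ - y)) over all elements
Hinge(preds, targets)          # sum of max(0, 1 - ŷ .* y), averaged over columns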