Added new loss functions.
parent 22cb732657, commit 633f0df01f
@@ -54,3 +54,31 @@ function normalise(x::AbstractArray, dims)
  Base.depwarn("`normalise(x::AbstractArray, dims)` is deprecated, use `normalise(a, dims=dims)` instead.", :normalise)
  normalise(x, dims = dims)
end

"""
    KLDivergence(ŷ, y)

Kullback-Leibler divergence (KL divergence) measures how much one probability
distribution differs from another. It is always non-negative, and zero only when
the two distributions are equal almost everywhere.
"""
function KLDivergence(ŷ, y)
  # KL(y ‖ ŷ) = Σ y .* log.(y ./ ŷ), averaged over the size(y, 2) columns:
  # the (negative) entropy term plus the cross-entropy term.
  entropy = sum(y .* log.(y)) * 1 // size(y, 2)
  cross_entropy = crossentropy(ŷ, y)
  return entropy + cross_entropy
end
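
# Illustrative sketch (not from the original commit), assuming `crossentropy` from
# earlier in this file is in scope and that ŷ, y hold one distribution per column:
# identical distributions give zero divergence.
kl_ŷ = [0.1 0.3; 0.9 0.7]   # hypothetical predicted probabilities (columns sum to 1)
kl_y = [0.1 0.3; 0.9 0.7]   # targets identical to the predictions
KLDivergence(kl_ŷ, kl_y)    # ≈ 0, since KL(y ‖ ŷ) vanishes when ŷ == y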

"""
    Poisson(ŷ, y)

Poisson loss measures how the predicted distribution diverges from the expected
distribution. It is the negative log-likelihood of a Poisson model, up to a term
that does not depend on `ŷ`, averaged over the columns of `y`.
"""
Poisson(ŷ, y) = sum(ŷ .- y .* log.(ŷ)) * 1 // size(y, 2)
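
# Illustrative sketch (not from the original commit), assuming ŷ holds strictly
# positive predicted rates and y the observed counts, one sample per column.
poisson_ŷ = [0.5 2.0; 1.5 3.0]   # hypothetical predicted rates
poisson_y = [1.0 2.0; 1.0 3.0]   # observed counts
Poisson(poisson_ŷ, poisson_y)    # sum(ŷ .- y .* log.(ŷ)) averaged over the 2 columns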

"""
    logcosh(ŷ, y)

Log-cosh loss: the sum of `log(cosh(ŷ - y))` over all elements. It behaves like
half the squared error for small residuals and like the absolute error for large
ones, making it less sensitive to outliers than `mse`.
"""
logcosh(ŷ, y) = sum(log.(cosh.(ŷ .- y)))
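
# Illustrative sketch (not from the original commit): log-cosh grows like (ŷ - y)^2 / 2
# for small residuals and like abs(ŷ - y) - log(2) for large ones, so outliers are
# damped relative to mse; note this sum is not averaged over samples.
logcosh_ŷ = [0.1, 2.0, -3.0]   # hypothetical predictions
logcosh_y = [0.0, 0.0, 0.0]    # targets
logcosh(logcosh_ŷ, logcosh_y)  # ≈ 0.005 + 1.325 + 2.309 ≈ 3.64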

"""
    Hinge(ŷ, y)

Hinge loss for ±1 targets: the mean over columns of `max(0, 1 - ŷ * y)`.
"""
Hinge(ŷ, y) = sum(max.(0.0, 1 .- ŷ .* y)) * 1 // size(y, 2)
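
# Illustrative sketch (not from the original commit), assuming y contains ±1 labels
# and ŷ raw scores; samples classified with margin ŷ*y ≥ 1 contribute zero loss.
hinge_ŷ = [2.0 -0.5; 1.5 0.3]   # hypothetical raw scores
hinge_y = [1.0 -1.0; 1.0 1.0]   # ±1 targets
Hinge(hinge_ŷ, hinge_y)         # (0.0 + 0.5 + 0.0 + 0.7) / 2 = 0.6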