Added docstring for hinge loss

This commit is contained in:
thebhatman 2019-10-03 21:13:09 +05:30
parent 2b30319a55
commit ec886c8ce8

View File

@ -50,11 +50,6 @@ function normalise(x::AbstractArray; dims=1)
return (x .- μ′) ./ σ
end
"""
    normalise(x::AbstractArray, dims)

Deprecated method taking `dims` positionally; emits a depwarn and
forwards to the keyword form `normalise(x, dims=dims)`.
"""
function normalise(x::AbstractArray, dims)
    # Positional `dims` is deprecated — warn once, then delegate.
    Base.depwarn("`normalise(x::AbstractArray, dims)` is deprecated, use `normalise(a, dims=dims)` instead.", :normalise)
    return normalise(x; dims = dims)
end
"""
Kullback-Leibler Divergence (KL Divergence)
KL Divergence is a measure of how much one probability distribution differs from a second, reference distribution.
@ -74,4 +69,8 @@ https://isaacchanghau.github.io/post/loss_functions/
"""
poisson(, y) = sum( .- y .* log.()) *1 // size(y,2)
"""
Hinge Loss function
Measures the loss given the prediction and true labels y(containing 1 or -1). This is usually used for measuring whether two inputs are similar or dissimilar
"""
hinge(, y) = sum(max.(0, 1 .- .* y)) *1 // size(y,2)