From ec886c8ce864721b4144cb749c458b3410c67946 Mon Sep 17 00:00:00 2001
From: thebhatman
Date: Thu, 3 Oct 2019 21:13:09 +0530
Subject: [PATCH] Added docstring for hinge loss

---
 src/layers/stateless.jl | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/layers/stateless.jl b/src/layers/stateless.jl
index c3dd22b0..8cdac33d 100644
--- a/src/layers/stateless.jl
+++ b/src/layers/stateless.jl
@@ -50,11 +50,6 @@ function normalise(x::AbstractArray; dims=1)
   return (x .- μ′) ./ σ′
 end
 
-function normalise(x::AbstractArray, dims)
-  Base.depwarn("`normalise(x::AbstractArray, dims)` is deprecated, use `normalise(a, dims=dims)` instead.", :normalise)
-  normalise(x, dims = dims)
-end
-
 """
     Kullback Leibler Divergence(KL Divergence)
 KLDivergence is a measure of how much one probability distribution is different from the other.
@@ -74,4 +69,8 @@ https://isaacchanghau.github.io/post/loss_functions/
 """
 poisson(ŷ, y) = sum(ŷ .- y .* log.(ŷ)) *1 // size(y,2)
 
+"""
+    Hinge Loss function
+Measures the loss given the prediction `ŷ` and the true labels `y` (containing 1 or -1). This is usually used for measuring whether two inputs are similar or dissimilar.
+"""
 hinge(ŷ, y) = sum(max.(0, 1 .- ŷ .* y)) *1 // size(y,2)
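
For reviewers, a minimal sketch of the behaviour the new docstring describes. The `hinge` definition is copied from this file so the snippet runs standalone; the input values are purely illustrative and not part of the patch:

```julia
# Definition as it appears in src/layers/stateless.jl (copied for a self-contained example).
hinge(ŷ, y) = sum(max.(0, 1 .- ŷ .* y)) * 1 // size(y, 2)

# One row per output, one column per sample; labels are +1 or -1.
ŷ = [0.9 -0.3 2.0]
y = [1.0 -1.0 -1.0]

# Per-sample terms max(0, 1 - ŷ*y) are 0.1, 0.7 and 3.0, then the sum
# is divided by size(y, 2) = 3 (the number of samples).
hinge(ŷ, y)  # ≈ 1.2667
```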