From 7f7085439cbe4eb9d5fff95b41d6345168398142 Mon Sep 17 00:00:00 2001
From: Evan Shelhamer
Date: Wed, 29 Jul 2015 17:20:31 -0700
Subject: [PATCH] [docs] fix contrastive loss eq

make documented equation match the correct implementation of the
`max(margin - d, 0)^2` term in the loss.

see #2321
---
 include/caffe/loss_layers.hpp | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/include/caffe/loss_layers.hpp b/include/caffe/loss_layers.hpp
index 86c3424..5282663 100644
--- a/include/caffe/loss_layers.hpp
+++ b/include/caffe/loss_layers.hpp
@@ -128,9 +128,9 @@ class LossLayer : public Layer<Dtype> {
 /**
  * @brief Computes the contrastive loss @f$
  *          E = \frac{1}{2N} \sum\limits_{n=1}^N \left(y\right) d +
- *              \left(1-y\right) \max \left(margin-d, 0\right)
+ *              \left(1-y\right) \max \left(margin-d, 0\right)^2
  *          @f$ where @f$
- *          d = \left| \left| a_n - b_n \right| \right|_2^2 @f$. This can be
+ *          d = \left| \left| a_n - b_n \right| \right|_2 @f$. This can be
  *          used to train siamese networks.
  *
  * @param bottom input Blob vector (length 3)
@@ -144,9 +144,9 @@ class LossLayer : public Layer<Dtype> {
  *   -# @f$ (1 \times 1 \times 1 \times 1) @f$
  *      the computed contrastive loss: @f$ E =
  *          \frac{1}{2N} \sum\limits_{n=1}^N \left(y\right) d +
- *              \left(1-y\right) \max \left(margin-d, 0\right)
+ *              \left(1-y\right) \max \left(margin-d, 0\right)^2
  *          @f$ where @f$
- *          d = \left| \left| a_n - b_n \right| \right|_2^2 @f$.
+ *          d = \left| \left| a_n - b_n \right| \right|_2 @f$.
  *      This can be used to train siamese networks.
  */
 template <typename Dtype>
--
2.7.4
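
For reference, below is a minimal standalone C++ sketch of the loss exactly as the corrected documentation states it: E = 1/(2N) * sum_n [ y_n * d_n + (1 - y_n) * max(margin - d_n, 0)^2 ] with d_n = ||a_n - b_n||_2 (the Euclidean distance, not its square). This is an illustration of the documented equation only, not Caffe's ContrastiveLossLayer implementation; the function and variable names here are hypothetical.

    // contrastive_loss_sketch.cpp -- illustrative only, not Caffe code.
    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Computes E = 1/(2N) * sum_n [ y_n * d_n + (1 - y_n) * max(margin - d_n, 0)^2 ]
    // where d_n = || a_n - b_n ||_2, matching the documentation fixed above.
    double contrastive_loss(const std::vector<std::vector<double> >& a,
                            const std::vector<std::vector<double> >& b,
                            const std::vector<int>& y, double margin) {
      const std::size_t N = a.size();
      double loss = 0.0;
      for (std::size_t n = 0; n < N; ++n) {
        double dist_sq = 0.0;
        for (std::size_t k = 0; k < a[n].size(); ++k) {
          const double diff = a[n][k] - b[n][k];
          dist_sq += diff * diff;
        }
        const double d = std::sqrt(dist_sq);  // Euclidean distance, not squared
        if (y[n] == 1) {
          loss += d;                          // similar pair term: y * d
        } else {
          const double m = std::max(margin - d, 0.0);
          loss += m * m;                      // dissimilar pair term: max(margin - d, 0)^2
        }
      }
      return loss / (2.0 * N);                // average with the 1/(2N) factor
    }

    int main() {
      // Hypothetical toy batch: one similar pair (y = 1), one dissimilar pair (y = 0).
      std::vector<std::vector<double> > a, b;
      a.push_back(std::vector<double>(2, 0.0)); b.push_back(std::vector<double>(2, 0.1));
      a.push_back(std::vector<double>(2, 0.0)); b.push_back(std::vector<double>(2, 0.2));
      std::vector<int> y;
      y.push_back(1);
      y.push_back(0);
      std::printf("loss = %f\n", contrastive_loss(a, b, y, 1.0));
      return 0;
    }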