From 8bd9465b79af9288d78deca8faddacbaa69a8abe Mon Sep 17 00:00:00 2001
From: Jongsoo Park
Date: Thu, 14 Mar 2019 03:12:08 -0700
Subject: [PATCH] make momentum non negative in adagrad test (#18009)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/18009

momentum should be initialized with non-negative values

Reviewed By: hyuen

Differential Revision: D14450841

fbshipit-source-id: 5bbbd11645db9e6f2dc42b26a00ff3caf378c59f
---
 caffe2/python/operator_test/adagrad_test.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/caffe2/python/operator_test/adagrad_test.py b/caffe2/python/operator_test/adagrad_test.py
index ff8c34e..0ae49d4 100644
--- a/caffe2/python/operator_test/adagrad_test.py
+++ b/caffe2/python/operator_test/adagrad_test.py
@@ -28,6 +28,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(
@@ -57,6 +58,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad_output_effective_lr(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(
@@ -86,6 +88,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad_output_effective_lr_and_update(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(
-- 
2.7.4