make momentum non-negative in adagrad test (#18009)
authorJongsoo Park <jongsoo@fb.com>
Thu, 14 Mar 2019 10:12:08 +0000 (03:12 -0700)
committerFacebook Github Bot <facebook-github-bot@users.noreply.github.com>
Thu, 14 Mar 2019 10:15:07 +0000 (03:15 -0700)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/18009

momentum should be initialized with non-negative values

Reviewed By: hyuen

Differential Revision: D14450841

fbshipit-source-id: 5bbbd11645db9e6f2dc42b26a00ff3caf378c59f

caffe2/python/operator_test/adagrad_test.py

index ff8c34e..0ae49d4 100644 (file)
@@ -28,6 +28,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(
@@ -57,6 +58,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad_output_effective_lr(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(
@@ -86,6 +88,7 @@ class TestAdagrad(serial.SerializedTestCase):
     )
     def test_adagrad_output_effective_lr_and_update(self, inputs, lr, epsilon, gc, dc):
         param, momentum, grad = inputs
+        momentum = np.abs(momentum)
         lr = np.array([lr], dtype=np.float32)
 
         op = core.CreateOperator(