)
if self.rowWise:
- assert self.engine == "SIMD", "Got {}".format(self.engine)
+ logger.info("Using engine {} for rowWise Adagrad".format(self.engine))
shapes, types = workspace.InferShapesAndTypes([param_init_net])
if str(param) not in shapes:
value=0.0
)
else:
+ logger.info("Using engine {} for regular Adagrad".format(self.engine))
+
if self.engine in FP16_ENGINES:
shapes, types = workspace.InferShapesAndTypes([param_init_net])
assert str(param) in shapes, shapes
workspace.FetchBlob(param)
+class TestRowWiseAdagrad(OptimizerTestBase, TestCase):
+    """Tests for the row-wise variant of Adagrad built via build_adagrad.
+
+    The dense test entry points are skipped below ("no dense support"),
+    so this suite effectively exercises only the sparse path.
+    """
+
+    def build_optimizer(self, model, **kwargs):
+        # NOTE(review): GPU runs are disabled for this optimizer —
+        # presumably there is no GPU rowWise kernel; confirm upstream.
+        self._skip_gpu = True
+        return build_adagrad(
+            model, base_learning_rate=1.0, lars=0.5, rowWise=True, **kwargs
+        )
+
+    def check_optimizer(self, optimizer):
+        # Row-wise Adagrad is expected to hold per-parameter (local)
+        # auxiliary state and no shared auxiliary parameters.
+        self.assertFalse(optimizer.get_auxiliary_parameters().shared)
+        self.assertTrue(optimizer.get_auxiliary_parameters().local)
+        for param in optimizer.get_auxiliary_parameters().local:
+            # Fetching each blob asserts it actually exists in the
+            # workspace (FetchBlob raises if the blob is missing).
+            workspace.FetchBlob(param)
+
+    def testDense(self):
+        raise unittest.SkipTest("no dense support")
+
+    def testGPUDense(self):
+        raise unittest.SkipTest("no dense support")
+
+
class TestWngrad(OptimizerTestBase, LRModificationTestBase, TestCase):
def build_optimizer(self, model, **kwargs):
self._skip_gpu = True