enable NoneValue parameter assignment for WeakScriptModule (#14715)
author Wanchao Liang <wanchaol@users.noreply.github.com>
Tue, 4 Dec 2018 04:38:53 +0000 (20:38 -0800)
committer Facebook Github Bot <facebook-github-bot@users.noreply.github.com>
Tue, 4 Dec 2018 04:40:55 +0000 (20:40 -0800)
Summary:
This PR:

1. Handles None-valued parameter attributes in WeakScriptModuleProxy (see the sketch below)
2. Adds back the module tests that now pass
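
For context, a minimal sketch (not taken from the PR) of the kind of module this change is meant to support: with `bias=False`, `nn.Linear` keeps `bias` as a None-valued entry in `_parameters`, and WeakScriptModuleProxy now copies it as a plain module attribute instead of trying to register it as a parameter. `MyModel` is a hypothetical name used only for illustration.

```python
import torch
import torch.nn as nn

class MyModel(torch.jit.ScriptModule):
    def __init__(self):
        super(MyModel, self).__init__()
        # With bias=False, fc.bias is a None-valued parameter; scripting this
        # module previously failed when the proxy copied that entry.
        self.fc = nn.Linear(10, 5, bias=False)

    @torch.jit.script_method
    def forward(self, x):
        return self.fc(x)

m = MyModel()
print(m(torch.randn(2, 10)).shape)  # expected: torch.Size([2, 5])
```

The `__constants__` additions to the nn modules below serve a related purpose: marking attributes such as `bias` as constants lets the script compiler specialize `self.bias is not None` checks when the attribute is None.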
Pull Request resolved: https://github.com/pytorch/pytorch/pull/14715

Differential Revision: D13313573

Pulled By: wanchaol

fbshipit-source-id: a6b7892707350290a6d69b6f6270ad089bfc954b

test/test_jit.py
torch/jit/__init__.py
torch/nn/modules/batchnorm.py
torch/nn/modules/linear.py
torch/nn/modules/normalization.py

diff --git a/test/test_jit.py b/test/test_jit.py
index 8dd70d3..b883099 100644
@@ -9689,23 +9689,9 @@ EXCLUDE_PYTHON_PRINT = {
 }
 
 EXCLUDE_SCRIPT_MODULES = {
-    'test_nn_LPPool2d_norm',
-    'test_nn_LPPool1d_norm',
-    'test_nn_BatchNorm1d_3d_input_not_affine',
-    'test_nn_BatchNorm1d_affine_simple_average',
-    'test_nn_BatchNorm1d_not_affine',
     'test_nn_BatchNorm1d_not_tracking_stats',
-    'test_nn_BatchNorm2d_2d_simple_average',
-    'test_nn_BatchNorm2d_not_affine',
     'test_nn_BatchNorm2d_not_tracking_stats',
-    'test_nn_BatchNorm3d_3d_simple_average',
-    'test_nn_BatchNorm3d_not_affine',
     'test_nn_BatchNorm3d_not_tracking_stats',
-    'test_nn_LayerNorm_1d_elementwise_affine',
-    'test_nn_LayerNorm_1d_no_elementwise_affine',
-    'test_nn_LayerNorm_3d_elementwise_affine',
-    'test_nn_LayerNorm_3d_no_elementwise_affine',
-    'test_nn_Linear_no_bias',
 }
 
 DISABLE_AUTODIFF_SUBGRAPH_INLINING = {
diff --git a/torch/jit/__init__.py b/torch/jit/__init__.py
index c9e1435..b0a4dc3 100644
@@ -1146,18 +1146,23 @@ if _enabled:
             self.__dict__['_initialized'] = False
             super(WeakScriptModuleProxy, self).__init__()
 
-            # Copy constants
             self.__dict__["_original"] = weakref.ref(original)
-            self.__dict__["_constants_set"] = set(getattr(original, "__constants__", []))
 
             # Copy Parameters / Modules / Buffers
             for name in dir(original):
                 item = getattr(original, name)
-                if isinstance(item, Parameter) or (isinstance(item, Module) and item is not self):
+                if item is None and name in original._parameters:
+                    # XXX: treat None value simply as module attributes instead of adding them to the parameter list
+                    # TODO: need to handle this more generally when non-tensor attributes added to module
+                    object.__setattr__(self, name, item)
+                elif isinstance(item, Parameter) or (isinstance(item, Module) and item is not self):
                     ScriptModule.__setattr__(self, name, item)
             for name in original._buffers:
                 self.register_buffer(name, original._buffers[name])
 
+            # Copy constants
+            self.__dict__["_constants_set"] = set(getattr(original, "__constants__", []))
+
             self.__dict__["_initialized"] = True
             _create_methods_from_stubs(self, stubs)
 
diff --git a/torch/nn/modules/batchnorm.py b/torch/nn/modules/batchnorm.py
index ec3385e..8d75d83 100644
@@ -13,7 +13,8 @@ from ..._jit_internal import weak_module, weak_script_method
 @weak_module
 class _BatchNorm(Module):
     _version = 2
-    __constants__ = ['training', 'track_running_stats', 'momentum', 'eps']
+    __constants__ = ['training', 'track_running_stats', 'momentum', 'eps',
+                     'weight', 'bias', 'running_mean', 'running_var', 'num_batches_tracked']
 
     def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True,
                  track_running_stats=True):
diff --git a/torch/nn/modules/linear.py b/torch/nn/modules/linear.py
index 912f105..657f382 100644
@@ -42,6 +42,7 @@ class Linear(Module):
         >>> print(output.size())
         torch.Size([128, 30])
     """
+    __constants__ = ['bias']
 
     def __init__(self, in_features, out_features, bias=True):
         super(Linear, self).__init__()
@@ -109,7 +110,7 @@ class Bilinear(Module):
         >>> print(output.size())
         torch.Size([128, 40])
     """
-    __constants__ = ['in1_features', 'in2_features', 'out_features']
+    __constants__ = ['in1_features', 'in2_features', 'out_features', 'bias']
 
     def __init__(self, in1_features, in2_features, out_features, bias=True):
         super(Bilinear, self).__init__()
diff --git a/torch/nn/modules/normalization.py b/torch/nn/modules/normalization.py
index b730821..3ff3325 100644
@@ -130,6 +130,8 @@ class LayerNorm(Module):
 
     .. _`Layer Normalization`: https://arxiv.org/abs/1607.06450
     """
+    __constants__ = ['normalized_shape', 'weight', 'bias', 'eps']
+
     def __init__(self, normalized_shape, eps=1e-5, elementwise_affine=True):
         super(LayerNorm, self).__init__()
         if isinstance(normalized_shape, numbers.Integral):