}
EXCLUDE_SCRIPT_MODULES = {
- 'test_nn_LPPool2d_norm',
- 'test_nn_LPPool1d_norm',
- 'test_nn_BatchNorm1d_3d_input_not_affine',
- 'test_nn_BatchNorm1d_affine_simple_average',
- 'test_nn_BatchNorm1d_not_affine',
'test_nn_BatchNorm1d_not_tracking_stats',
- 'test_nn_BatchNorm2d_2d_simple_average',
- 'test_nn_BatchNorm2d_not_affine',
'test_nn_BatchNorm2d_not_tracking_stats',
- 'test_nn_BatchNorm3d_3d_simple_average',
- 'test_nn_BatchNorm3d_not_affine',
'test_nn_BatchNorm3d_not_tracking_stats',
- 'test_nn_LayerNorm_1d_elementwise_affine',
- 'test_nn_LayerNorm_1d_no_elementwise_affine',
- 'test_nn_LayerNorm_3d_elementwise_affine',
- 'test_nn_LayerNorm_3d_no_elementwise_affine',
- 'test_nn_Linear_no_bias',
}
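These entries come off the exclusion list because the modules they cover (LPPool, the non-affine and simple-average BatchNorm variants, LayerNorm, and bias-free Linear) now compile as weak script modules once their optional attributes are declared in __constants__, as the hunks below show. A minimal, illustrative sketch of the pattern the unexcluded tests exercise (Wrapper is a made-up name), assuming the torch.jit.ScriptModule / script_method API from this release:

    import torch
    import torch.nn as nn

    class Wrapper(torch.jit.ScriptModule):
        def __init__(self):
            super(Wrapper, self).__init__()
            # bias=False leaves self.fc.bias as None; with 'bias' in
            # __constants__ the compiler can fold the None case away
            self.fc = nn.Linear(10, 5, bias=False)

        @torch.jit.script_method
        def forward(self, x):
            return self.fc(x)

    print(Wrapper()(torch.randn(2, 10)).size())  # torch.Size([2, 5])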
DISABLE_AUTODIFF_SUBGRAPH_INLINING = {
self.__dict__['_initialized'] = False
super(WeakScriptModuleProxy, self).__init__()
- # Copy constants
self.__dict__["_original"] = weakref.ref(original)
- self.__dict__["_constants_set"] = set(getattr(original, "__constants__", []))
# Copy Parameters / Modules / Buffers
for name in dir(original):
item = getattr(original, name)
- if isinstance(item, Parameter) or (isinstance(item, Module) and item is not self):
+ if item is None and name in original._parameters:
+ # XXX: treat None values simply as module attributes instead of adding them to the parameter list
+ # TODO: handle this more generally when non-tensor attributes are added to a module
+ object.__setattr__(self, name, item)
+ elif isinstance(item, Parameter) or (isinstance(item, Module) and item is not self):
ScriptModule.__setattr__(self, name, item)
for name in original._buffers:
self.register_buffer(name, original._buffers[name])
+ # Copy constants
+ self.__dict__["_constants_set"] = set(getattr(original, "__constants__", []))
+
self.__dict__["_initialized"] = True
_create_methods_from_stubs(self, stubs)
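The reordering above matters: parameters, submodules, and buffers are copied before the constants set is recorded, and a parameter slot holding None (e.g. a Linear built with bias=False) is stored as a plain attribute via object.__setattr__ rather than registered as a parameter. A short illustration of the case the new branch handles:

    import torch.nn as nn

    m = nn.Linear(4, 4, bias=False)
    print(m.bias)                   # None -- the slot exists but holds no tensor
    print('bias' in m._parameters)  # True -- exactly the case the None branch catches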
@weak_module
class _BatchNorm(Module):
_version = 2
- __constants__ = ['training', 'track_running_stats', 'momentum', 'eps']
+ __constants__ = ['training', 'track_running_stats', 'momentum', 'eps',
+ 'weight', 'bias', 'running_mean', 'running_var', 'num_batches_tracked']
def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True,
track_running_stats=True):
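Listing weight, bias, and the running stats in __constants__ lets batch norms built with affine=False or track_running_stats=False compile: those attributes are then None, and a constant None is something the script compiler can branch on at compile time. A quick check of which attributes are affected:

    import torch
    import torch.nn as nn

    bn = nn.BatchNorm2d(8, affine=False, track_running_stats=False)
    print(bn.weight, bn.running_mean)  # None None -- folded as constants when scripted
    out = bn(torch.randn(2, 8, 4, 4))  # batch statistics are used directly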
>>> print(output.size())
torch.Size([128, 30])
"""
+ __constants__ = ['bias']
def __init__(self, in_features, out_features, bias=True):
super(Linear, self).__init__()
>>> print(output.size())
torch.Size([128, 40])
"""
- __constants__ = ['in1_features', 'in2_features', 'out_features']
+ __constants__ = ['in1_features', 'in2_features', 'out_features', 'bias']
def __init__(self, in1_features, in2_features, out_features, bias=True):
super(Bilinear, self).__init__()
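Bilinear gets the same treatment as Linear: declaring 'bias' a constant lets the bias=False case, where the attribute is None, go through the script compiler. For example:

    import torch
    import torch.nn as nn

    bl = nn.Bilinear(20, 30, 40, bias=False)
    print(bl.bias)  # None -- treated as a fixed constant when scripted
    print(bl(torch.randn(128, 20), torch.randn(128, 30)).size())  # torch.Size([128, 40])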
.. _`Layer Normalization`: https://arxiv.org/abs/1607.06450
"""
+ __constants__ = ['normalized_shape', 'weight', 'bias', 'eps']
+
def __init__(self, normalized_shape, eps=1e-5, elementwise_affine=True):
super(LayerNorm, self).__init__()
if isinstance(normalized_shape, numbers.Integral):
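As with the modules above, declaring weight and bias constants covers LayerNorm's elementwise_affine=False configuration, where both are None; normalized_shape and eps are plain Python values the compiled forward needs baked in. A quick check of that configuration:

    import torch
    import torch.nn as nn

    ln = nn.LayerNorm(10, elementwise_affine=False)
    print(ln.weight, ln.bias)             # None None
    print(ln(torch.randn(4, 10)).size())  # torch.Size([4, 10])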