From 5bc53ac5ef9a14ea41dc01c78055e3382f996e57 Mon Sep 17 00:00:00 2001
From: Howard Huang
Date: Thu, 9 Sep 2021 08:20:40 -0700
Subject: [PATCH] Revert D30745961: [DDP] Remove self.modules_params

Test Plan: revert-hammer

Differential Revision:
D30745961 (https://github.com/pytorch/pytorch/commit/8c095102948c9601792a884dad56da5085c51bee)

Original commit changeset: 32d102502570

fbshipit-source-id: 59f7cc50d369b6cc2856cf4ebd0f58b96202336d
---
 torch/nn/parallel/distributed.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/torch/nn/parallel/distributed.py b/torch/nn/parallel/distributed.py
index 734d42c..a1e24b1 100644
--- a/torch/nn/parallel/distributed.py
+++ b/torch/nn/parallel/distributed.py
@@ -734,6 +734,9 @@ class DistributedDataParallel(Module, Joinable):
             for replica in modules_and_parameters
         ]
 
+        # The following modules_params and modules_buffers are used for
+        # param/buffer sync in _sync_params.
+        self.modules_params = [list(self._get_parameters(self.module))]
         self._assign_modules_buffers()
 
         return parameters, expect_sparse_gradient
--
2.7.4
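
Note: below is a minimal sketch (not PyTorch's actual code) of how a cached
parameter list such as the restored self.modules_params can be consumed for
the param sync that the comment above attributes to _sync_params.
sync_params_sketch and src_rank are hypothetical names for illustration; it
assumes a default torch.distributed process group has been initialized.

import torch.distributed as dist

def sync_params_sketch(modules_params, src_rank=0):
    # modules_params mirrors the structure built in the patch: a list with
    # one entry for the local replica, holding the module's parameter tensors.
    for param in modules_params[0]:
        # Broadcast each parameter in place from src_rank so that every
        # rank starts the iteration with identical weights.
        dist.broadcast(param.data, src=src_rank)

One likely motivation for caching the list once at construction, as the
restored line does, is that each sync can then reuse it instead of
re-walking the module tree to collect parameters on every call.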