From: Rohan Varma Date: Thu, 9 Sep 2021 02:13:33 +0000 (-0700) Subject: [DDP] Remove self.modules_params (#64473) X-Git-Tag: accepted/tizen/8.0/unified/20231005.095509~351 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=8c095102948c9601792a884dad56da5085c51bee;p=platform%2Fupstream%2Fpytorch.git [DDP] Remove self.modules_params (#64473) Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/64473 Unused after SPMD was deprecated. ghstack-source-id: 137526305 Test Plan: CI Reviewed By: zhaojuanmao Differential Revision: D30745961 fbshipit-source-id: 32d102502570291e01579e5b47a6d74dc71013bb --- diff --git a/torch/nn/parallel/distributed.py b/torch/nn/parallel/distributed.py index a1e24b1..734d42c 100644 --- a/torch/nn/parallel/distributed.py +++ b/torch/nn/parallel/distributed.py @@ -734,9 +734,6 @@ class DistributedDataParallel(Module, Joinable): for replica in modules_and_parameters ] - # The following modules_params and modules_buffers are used for - # param/buffer sync in _sync_params. - self.modules_params = [list(self._get_parameters(self.module))] self._assign_modules_buffers() return parameters, expect_sparse_gradient