From 542c273e5b1739dc09582dfbf3f801a385105532 Mon Sep 17 00:00:00 2001
From: Deepali Chourasia
Date: Mon, 18 Mar 2019 23:06:03 -0700
Subject: [PATCH] Handle scenario when GPU support is not available and
 p2p_access_pattern is empty (#17974)

Summary:
Observed that when no GPU support is available, `workspace` defines
`GetGpuPeerAccessPattern` to return `[]` in
https://github.com/pytorch/pytorch/blob/master/caffe2/python/workspace.py#L79,
and this case is not handled in
https://github.com/pytorch/pytorch/blob/master/caffe2/python/data_parallel_model.py#L1065.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/17974

Differential Revision: D14517066

Pulled By: ezyang

fbshipit-source-id: 186911d95c07e9a55ab82a41d0c7c919e4281bb4
---
 caffe2/python/data_parallel_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/caffe2/python/data_parallel_model.py b/caffe2/python/data_parallel_model.py
index 3abe401..455b3dc 100644
--- a/caffe2/python/data_parallel_model.py
+++ b/caffe2/python/data_parallel_model.py
@@ -1062,7 +1062,7 @@ def _AllReduce(devices, model, net, param, use_nccl=False, control_input=None):
         for i, peer in enumerate(devices):
             if i == 0:
                 continue  # Skip the first device
-            if p2p_access_pattern is not None and not p2p_access_pattern[
+            if p2p_access_pattern is not None and p2p_access_pattern.size and not p2p_access_pattern[
                 devices[0], peer
             ]:
                 # Copy from peer to d0
--
2.7.4
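
Note: a minimal standalone sketch of the fixed condition, assuming `p2p_access_pattern` is the NumPy array returned by `workspace.GetGpuPeerAccessPattern()`; the helper name `needs_explicit_copy` is hypothetical, not Caffe2 API:

```python
# Sketch (not Caffe2 source) of why the `.size` guard matters: without
# GPU support the peer-access pattern is an empty array, and indexing it
# with a (device, peer) pair raises IndexError.
import numpy as np

def needs_explicit_copy(p2p_access_pattern, d0, peer):
    # Mirrors the fixed condition in _AllReduce: short-circuit on an
    # empty pattern before attempting the 2-D index.
    return bool(
        p2p_access_pattern is not None
        and p2p_access_pattern.size
        and not p2p_access_pattern[d0, peer]
    )

# GPU case: devices 0 and 1 have mutual peer access, so no copy is needed.
pattern = np.ones((2, 2), dtype=bool)
assert needs_explicit_copy(pattern, 0, 1) is False

# CPU-only case: the old condition would index the empty array and raise
# "IndexError: too many indices for array"; the size check short-circuits.
empty = np.asarray([])
assert needs_explicit_copy(empty, 0, 1) is False
```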