From 710a2e933f33145e33fdf669ef9fd5fb3cb50d18 Mon Sep 17 00:00:00 2001
From: "Tugsbayasgalan (Tugsuu) Manlaibaatar"
Date: Sun, 29 Aug 2021 14:17:54 -0700
Subject: [PATCH] [DOC] Add doc for maybe_wrap_dim (#63161)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/63161

Test Plan: Imported from OSS

Reviewed By: pbelevich

Differential Revision: D30629451

Pulled By: tugsbayasgalan

fbshipit-source-id: b03f030f197e10393a8ff223b240d23c30858028
---
 aten/src/ATen/WrapDimUtils.h | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/aten/src/ATen/WrapDimUtils.h b/aten/src/ATen/WrapDimUtils.h
index 2768efe..13e605c 100644
--- a/aten/src/ATen/WrapDimUtils.h
+++ b/aten/src/ATen/WrapDimUtils.h
@@ -7,6 +7,9 @@ namespace at {

 static inline int64_t maybe_wrap_dim(int64_t dim, int64_t dim_post_expr, bool wrap_scalar=true) {
+  // if dim_post_expr is 0 and wrap_scalar is true, then dim must be in the range [-1, 0].
+  // This is a special case for scalar tensors and manifests in e.g. torch.sum(scalar_tensor, 0)
+  // Otherwise, dim should be in the range [-dim_post_expr, dim_post_expr-1].
   return c10::maybe_wrap_dim(dim, dim_post_expr, wrap_scalar);
 }
--
2.7.4
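
For reference, below is a minimal sketch of the range rule documented in the comment above. It uses a hypothetical helper name, wrap_dim_sketch, and is not the actual c10::maybe_wrap_dim implementation; it only illustrates the documented behavior.

#include <cstdint>
#include <stdexcept>

// Sketch of the documented rule: wrap a negative dim into [0, dim_post_expr-1],
// with 0-dim (scalar) tensors treated as 1-dimensional when wrap_scalar is true.
static int64_t wrap_dim_sketch(int64_t dim, int64_t dim_post_expr,
                               bool wrap_scalar = true) {
  if (dim_post_expr <= 0) {
    if (!wrap_scalar) {
      throw std::out_of_range("dimension specified for a tensor with no dimensions");
    }
    // Scalar (0-dim) tensors: treat them as 1-dimensional so that
    // dim in [-1, 0] is accepted, e.g. torch.sum(scalar_tensor, 0).
    dim_post_expr = 1;
  }
  const int64_t min = -dim_post_expr;
  const int64_t max = dim_post_expr - 1;
  if (dim < min || dim > max) {
    throw std::out_of_range("dimension out of range");
  }
  if (dim < 0) {
    dim += dim_post_expr;  // wrap a negative index, e.g. -1 -> dim_post_expr - 1
  }
  return dim;
}

For example, wrap_dim_sketch(-1, 4) returns 3, and wrap_dim_sketch(0, 0) returns 0 because the scalar tensor is treated as one-dimensional.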