From d986d4bf6354bc02d5e68eaaea60b02234a4449f Mon Sep 17 00:00:00 2001
From: kshitij12345
Date: Thu, 19 Aug 2021 12:40:37 -0700
Subject: [PATCH] [special] use __all__ to hide internal imports (#63135)

Summary:
Reference: https://github.com/pytorch/pytorch/issues/50345

Pull Request resolved: https://github.com/pytorch/pytorch/pull/63135

Reviewed By: ngimel

Differential Revision: D30364287

Pulled By: mruberry

fbshipit-source-id: 20078668943fafa45ce09610634b1d2c424b1922
---
 torch/special/__init__.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/torch/special/__init__.py b/torch/special/__init__.py
index 1f3b3fc..2fea9c6 100644
--- a/torch/special/__init__.py
+++ b/torch/special/__init__.py
@@ -1,9 +1,12 @@
-import sys
-
 import torch
 from torch._C import _add_docstr, _special  # type: ignore[attr-defined]
 from torch._torch_docs import common_args, multi_dim_common
 
+__all__ = ['entr', 'psi', 'digamma', 'gammaln', 'polygamma', 'erf', 'erfc', 'erfinv',
+           'erfcx', 'logit', 'logsumexp', 'expit', 'exp2', 'expm1', 'xlog1py', 'xlogy',
+           'i0', 'i0e', 'i1', 'i1e', 'ndtr', 'ndtri', 'log1p', 'sinc', 'round', 'log_softmax',
+           'zeta', 'multigammaln']
+
 Tensor = torch.Tensor
 
 entr = _add_docstr(_special.special_entr,
-- 
2.7.4
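
For context, a minimal sketch of what this __all__ declaration changes (hypothetical
usage; assumes a torch build that already contains this patch): a star-import from
torch.special now copies only the names listed in __all__, so module-level names
created by the internal imports (common_args, multi_dim_common, Tensor) are no longer
leaked into the importer's namespace.

    # Sketch, assuming a torch build that includes this patch:
    # __all__ restricts what `from torch.special import *` re-exports.
    ns = {}
    exec("from torch.special import *", ns)
    print('entr' in ns)              # True  -- listed in __all__, part of the public API
    print('common_args' in ns)       # False -- internal import, no longer exported
    print('multi_dim_common' in ns)  # False -- likewise hidden

Note that __all__ only governs star-imports and tools that honor it (linters, docs
generators); dir(torch.special) still lists every attribute bound in the module.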