From 6ec753f2f9f950e4a7ac3e2468cb073500b1d930 Mon Sep 17 00:00:00 2001
From: Derek Kim
Date: Mon, 14 Jan 2019 02:38:36 -0800
Subject: [PATCH] Improve the docstring of nn.random.fork_rng (#15960)

Summary:
Improved the docstring of nn.random.fork_rng
Pull Request resolved: https://github.com/pytorch/pytorch/pull/15960

Differential Revision: D13649929

Pulled By: soumith

fbshipit-source-id: d3843179a2f1f838792c2f07f34deda2c06af56e
---
 torch/random.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/torch/random.py b/torch/random.py
index 26cebf9..e71c425 100644
--- a/torch/random.py
+++ b/torch/random.py
@@ -56,10 +56,10 @@ def fork_rng(devices=None, enabled=True, _caller="fork_rng", _devices_kw="device
             the RNG. CPU RNG state is always forked. By default, :meth:`fork_rng` operates
             on all devices, but will emit a warning if your machine has a lot
             of devices, since this function will run very slowly in that case.
-            If you explicitly specify devices, this warning will be supressed
+            If you explicitly specify devices, this warning will be suppressed
         enabled (bool): if ``False``, the RNG is not forked. This is a convenience
             argument for easily disabling the context manager without having
-            to reindent your Python code.
+            to delete it and unindent your Python code under it.
     """
 
     import torch.cuda
--
2.7.4
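
The docstring touched by this patch describes how torch.random.fork_rng behaves: RNG state is saved on entry and restored on exit, and enabled=False turns the context manager into a no-op so the code under it does not have to be deleted and unindented. The following is a minimal usage sketch of that behavior, not part of the patch itself; it assumes a PyTorch build where torch.random.fork_rng(devices=None, enabled=True) has the signature shown in the hunk above, and the specific seed and tensor sizes are illustrative only.

    import torch

    torch.manual_seed(0)
    before = torch.rand(1)          # first draw after seeding

    torch.manual_seed(0)
    # devices=[] forks only the CPU RNG and suppresses the many-devices warning.
    with torch.random.fork_rng(devices=[]):
        torch.rand(10)              # draws inside the fork do not leak out
    after = torch.rand(1)           # CPU RNG state was restored on exit

    assert torch.equal(before, after)

    # With enabled=False the context manager does nothing, so draws inside
    # the block advance the global RNG as if the `with` were not there.
    with torch.random.fork_rng(enabled=False):
        torch.rand(1)

The enabled flag is what the reworded sentence is about: flipping it to False disables forking while keeping the with-block and its indentation intact.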