From 7cdf786a07b2ca434983a0a508e2e42c75a4697d Mon Sep 17 00:00:00 2001
From: Michael Carilli
Date: Wed, 29 Jul 2020 13:12:34 -0700
Subject: [PATCH] fix typo in GradScaler docstring (#42236)

Summary:
Closes https://github.com/pytorch/pytorch/issues/42226.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/42236

Reviewed By: albanD

Differential Revision: D22817980

Pulled By: ngimel

fbshipit-source-id: 4326fe028dba1dbeed454edc4e4d4fffa56f51d6
---
 torch/cuda/amp/grad_scaler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torch/cuda/amp/grad_scaler.py b/torch/cuda/amp/grad_scaler.py
index a7dc2a3d3af07..066ff1a0d3117 100644
--- a/torch/cuda/amp/grad_scaler.py
+++ b/torch/cuda/amp/grad_scaler.py
@@ -93,7 +93,7 @@ class GradScaler(object):
     Arguments:
         init_scale (float, optional, default=2.**16):  Initial scale factor.
         growth_factor (float, optional, default=2.0):  Factor by which the scale is multiplied during
-            :meth:`update` if no inf/NaN gradients occur for ``growth_factor`` consecutive iterations.
+            :meth:`update` if no inf/NaN gradients occur for ``growth_interval`` consecutive iterations.
         backoff_factor (float, optional, default=0.5):  Factor by which the scale is multiplied during
             :meth:`update` if inf/NaN gradients occur in an iteration.
         growth_interval (int, optional, default=2000):  Number of consecutive iterations without inf/NaN gradients
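
For reference, a minimal sketch (not part of this patch) of how the arguments documented in the hunk above are typically passed to torch.cuda.amp.GradScaler in a training loop. The model, optimizer, and data here are placeholders chosen for illustration; only the GradScaler/autocast calls reflect the API the docstring describes.

    import torch

    # Placeholder model, optimizer, and data; not part of this patch.
    model = torch.nn.Linear(10, 10).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    data = [(torch.randn(8, 10, device="cuda"), torch.randn(8, 10, device="cuda"))
            for _ in range(4)]

    # The arguments below are the defaults described in the docstring this
    # patch fixes: the scale grows by growth_factor after growth_interval
    # consecutive iterations without inf/NaN gradients, and shrinks by
    # backoff_factor whenever inf/NaN gradients occur.
    scaler = torch.cuda.amp.GradScaler(init_scale=2.**16,
                                       growth_factor=2.0,
                                       backoff_factor=0.5,
                                       growth_interval=2000)

    for inputs, targets in data:
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():
            loss = torch.nn.functional.mse_loss(model(inputs), targets)
        scaler.scale(loss).backward()  # backprop with the scaled loss
        scaler.step(optimizer)         # unscales grads; skips step on inf/NaN
        scaler.update()                # adjusts the scale for the next iteration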