torch.cuda.is_available() is redundant as apex handles that internally (#9350)

commit ae333d04b2
parent 8217d4e37f
Author: Stas Bekman
Date: 2020-12-30 01:09:51 -08:00 (committed by GitHub)
3 changed files with 15 additions and 19 deletions
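
For context: all three hunks below make the same simplification. The torch.cuda.is_available() guard around the apex import is dropped, because the import already raises ImportError on machines without apex and, per the commit title, apex handles the no-CUDA case internally. A minimal sketch of the resulting pattern (the helper name build_layer_norm is illustrative, not taken from the diff):

import torch

def build_layer_norm(normalized_shape, eps=1e-5, elementwise_affine=True):
    # Prefer apex's fused kernel when apex is installed; the bare import
    # is the availability check, so no separate CUDA probe is needed.
    try:
        from apex.normalization import FusedLayerNorm
        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
    except ImportError:
        # apex not installed: fall back to PyTorch's built-in LayerNorm.
        return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)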


@@ -110,13 +110,12 @@ def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int]
 def BartLayerNorm(normalized_shape: torch.Size, eps: float = 1e-5, elementwise_affine: bool = True):
-    if torch.cuda.is_available():
-        try:
-            from apex.normalization import FusedLayerNorm
-            return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
-        except ImportError:
-            pass
+    try:
+        from apex.normalization import FusedLayerNorm
+        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
+    except ImportError:
+        pass
     return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)


@@ -265,14 +265,12 @@ FSMT_INPUTS_DOCSTRING = r"""
 have_fused_layer_norm = False
-if torch.cuda.is_available():
-    try:
-        from apex.normalization import FusedLayerNorm
-        have_fused_layer_norm = True
-    except ImportError:
-        pass
+try:
+    from apex.normalization import FusedLayerNorm
+    have_fused_layer_norm = True
+except ImportError:
+    pass
 LayerNorm = FusedLayerNorm if have_fused_layer_norm else torch.nn.LayerNorm


@@ -511,13 +511,12 @@ class ProphetNetDecoderLMOutput(ModelOutput):
 def ProphetNetLayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True):
-    if torch.cuda.is_available():
-        try:
-            from apex.normalization import FusedLayerNorm
-            return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
-        except ImportError:
-            pass
+    try:
+        from apex.normalization import FusedLayerNorm
+        return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
+    except ImportError:
+        pass
     return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
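
A quick usage check of the sketch above, again assuming the illustrative build_layer_norm helper. Per the commit title, apex handles the no-CUDA case internally, so the returned module is usable either way:

ln = build_layer_norm(512)
x = torch.randn(2, 16, 512)
print(type(ln).__name__)  # FusedLayerNorm with apex installed, LayerNorm without
print(ln(x).shape)        # torch.Size([2, 16, 512])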