Make `is_torch_bf16_available_on_device` more strict (#28796)
fix

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
commit eb8e7a005f
parent 0d26abdd3a
@@ -359,6 +359,14 @@ def is_torch_fp16_available_on_device(device):
     try:
         x = torch.zeros(2, 2, dtype=torch.float16).to(device)
         _ = x @ x
+
+        # For now, let's be strict about the check: verify that `LayerNorm` is also supported on the device,
+        # because many models use this layer.
+        batch, sentence_length, embedding_dim = 3, 4, 5
+        embedding = torch.randn(batch, sentence_length, embedding_dim, dtype=torch.float16, device=device)
+        layer_norm = torch.nn.LayerNorm(embedding_dim, dtype=torch.float16, device=device)
+        _ = layer_norm(embedding)
+
     except:  # noqa: E722
         # TODO: more precise exception matching, if possible.
         # Most backends should raise `RuntimeError`, however this is not guaranteed.
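For context, the probe this diff strengthens can be exercised on its own. Below is a minimal standalone sketch mirroring the committed check; the function name `fp16_probe` and the `"cpu"` device string are illustrative, not part of the transformers API:

import torch

def fp16_probe(device: str) -> bool:
    # Standalone sketch of the strengthened availability check (hypothetical helper,
    # modeled on the committed `is_torch_fp16_available_on_device` logic).
    try:
        # Basic check: float16 matmul must work on the target device.
        x = torch.zeros(2, 2, dtype=torch.float16).to(device)
        _ = x @ x

        # Stricter check added by this commit: `LayerNorm` must also run in
        # float16 on the device, since many models depend on it.
        batch, sentence_length, embedding_dim = 3, 4, 5
        embedding = torch.randn(batch, sentence_length, embedding_dim, dtype=torch.float16, device=device)
        layer_norm = torch.nn.LayerNorm(embedding_dim, dtype=torch.float16, device=device)
        _ = layer_norm(embedding)
    except Exception:
        # Unsupported ops usually raise RuntimeError, but the backend behavior
        # is not guaranteed, hence the broad catch.
        return False
    return True

print(fp16_probe("cpu"))  # True or False depending on the build's fp16 kernel support

The point of the change is that a device passing only the matmul check could still fail on real models; adding the `LayerNorm` pass makes the availability signal a closer proxy for actual model support.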