[tests] add the missing `require_torch_multi_gpu` flag (#30250)
add gpu flag
This commit is contained in:
parent
440bd3c3c0
commit
667939a2d3
|
@@ -17,7 +17,7 @@
 import copy
 import unittest

-from transformers.testing_utils import require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, require_torch_multi_gpu, slow, torch_device
 from transformers.utils import is_torch_available

 from ...test_configuration_common import ConfigTester
|
@@ -344,6 +344,7 @@ class BrosModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)

+    @require_torch_multi_gpu
+    def test_multi_gpu_data_parallel_forward(self):
+        super().test_multi_gpu_data_parallel_forward()

|
Loading…
Reference in New Issue