[tests] add the missing `require_torch_multi_gpu` flag (#30250)

add gpu flag
This commit is contained in:
Fanli Lin 2024-04-15 23:30:52 +08:00 committed by GitHub
parent 440bd3c3c0
commit 667939a2d3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 2 additions and 1 deletion

View File

@@ -17,7 +17,7 @@
 import copy
 import unittest
-from transformers.testing_utils import require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, require_torch_multi_gpu, slow, torch_device
 from transformers.utils import is_torch_available
 from ...test_configuration_common import ConfigTester
@@ -344,6 +344,7 @@ class BrosModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)

+    @require_torch_multi_gpu
     def test_multi_gpu_data_parallel_forward(self):
         super().test_multi_gpu_data_parallel_forward()