Fix FNet tokenizer tests (#13995)

This commit is contained in:
Lysandre Debut 2021-10-14 09:07:51 -04:00 committed by GitHub
parent f2002fea11
commit 7604557e44
1 changed file with 3 additions and 3 deletions


@@ -17,7 +17,7 @@ import os
 import unittest
 
 from transformers import FNetTokenizer, FNetTokenizerFast
-from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow
+from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow, tooslow
 from transformers.tokenization_utils import AddedToken
 
 from .test_tokenization_common import TokenizerTesterMixin
@@ -172,7 +172,7 @@ class FNetTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
         self.assertTrue(special_token_id in p_output)
         self.assertTrue(special_token_id in cr_output)
 
-    @slow
+    @tooslow
    def test_special_tokens_initialization_from_slow(self):
         for tokenizer, pretrained_name, kwargs in self.tokenizers_list:
             with self.subTest(f"{tokenizer.__class__.__name__} ({pretrained_name})"):
@@ -439,5 +439,5 @@ class FNetTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
         self.tokenizer_integration_test_util(
             expected_encoding=expected_encoding,
             model_name="google/fnet-base",
-            revision="58e0d1f96af163dc8d0a84a2fddf4bd403e4e802",
+            revision="34219a71ca20e280cc6000b89673a169c65d605c",
         )
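
For context, the substantive change is demoting test_special_tokens_initialization_from_slow from the slow marker to tooslow. The sketch below shows how these two decorators typically differ in transformers.testing_utils (a minimal approximation, not the exact library source, and parse_flag_from_env here is a simplified stand-in): slow tests are skipped unless the RUN_SLOW environment variable is set, so the scheduled CI still runs them, while tooslow tests are skipped unconditionally and no CI job executes them.

    import os
    import unittest

    def parse_flag_from_env(key, default=False):
        # Simplified stand-in: read an environment variable as a boolean flag.
        value = os.environ.get(key, str(default))
        return value.lower() in ("yes", "true", "t", "1")

    _run_slow_tests = parse_flag_from_env("RUN_SLOW", default=False)

    def slow(test_case):
        # Skipped by default; set RUN_SLOW=1 to run, as the scheduled CI does.
        return unittest.skipUnless(_run_slow_tests, "test is slow")(test_case)

    def tooslow(test_case):
        # Skipped unconditionally: not even the scheduled slow CI runs these.
        return unittest.skip("test is too slow")(test_case)

The last hunk pins the integration test to a newer commit of google/fnet-base on the Hub. The revision argument accepts any commit hash, branch, or tag, as in FNetTokenizer.from_pretrained("google/fnet-base", revision="34219a71ca20e280cc6000b89673a169c65d605c"), which keeps the test reproducible even if the repository's main branch changes.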