Add BigBird ONNX config (#16427)

Minh Chien Vu, 2022-04-13 03:46:06 +09:00, committed by GitHub
commit 9c9db751e2 (parent a960406722)
6 changed files with 30 additions and 4 deletions

docs/source/en/serialization.mdx

@@ -49,6 +49,7 @@ Ready-made configurations include the following architectures:
 - BART
 - BEiT
 - BERT
+- BigBird
 - Blenderbot
 - BlenderbotSmall
 - CamemBERT

src/transformers/models/big_bird/__init__.py

@@ -28,7 +28,7 @@ from ...utils import (
 _import_structure = {
-    "configuration_big_bird": ["BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP", "BigBirdConfig"],
+    "configuration_big_bird": ["BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP", "BigBirdConfig", "BigBirdOnnxConfig"],
 }
 if is_sentencepiece_available():
@@ -66,7 +66,7 @@ if is_flax_available():
     ]
 if TYPE_CHECKING:
-    from .configuration_big_bird import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP, BigBirdConfig
+    from .configuration_big_bird import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP, BigBirdConfig, BigBirdOnnxConfig
     if is_sentencepiece_available():
         from .tokenization_big_bird import BigBirdTokenizer
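Note: with both the _import_structure entry and the TYPE_CHECKING branch updated, the new class is reachable through the package's lazy module. A minimal import sketch (assumes a transformers build that contains this commit):

# _LazyModule resolves the submodule on first attribute access;
# the TYPE_CHECKING branch gives static type checkers the same view.
from transformers.models.big_bird import BigBirdConfig, BigBirdOnnxConfig

onnx_config = BigBirdOnnxConfig(BigBirdConfig())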

src/transformers/models/big_bird/configuration_big_bird.py

@@ -13,8 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """ BigBird model configuration"""
+from collections import OrderedDict
+from typing import Mapping
 from ...configuration_utils import PretrainedConfig
+from ...onnx import OnnxConfig
 from ...utils import logging
@@ -160,3 +163,14 @@ class BigBirdConfig(PretrainedConfig):
         self.block_size = block_size
         self.num_random_blocks = num_random_blocks
         self.classifier_dropout = classifier_dropout
+
+
+class BigBirdOnnxConfig(OnnxConfig):
+    @property
+    def inputs(self) -> Mapping[str, Mapping[int, str]]:
+        return OrderedDict(
+            [
+                ("input_ids", {0: "batch", 1: "sequence"}),
+                ("attention_mask", {0: "batch", 1: "sequence"}),
+            ]
+        )
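The config only declares the dynamic input axes; outputs, default opset, and dummy-input generation are inherited from OnnxConfig. A minimal sketch of what the class yields for the default task (behavior per the v4.18-era OnnxConfig base class; assumes a build with this commit):

from transformers import BigBirdConfig
from transformers.models.big_bird import BigBirdOnnxConfig

onnx_config = BigBirdOnnxConfig(BigBirdConfig())
print(onnx_config.inputs)
# OrderedDict([('input_ids', {0: 'batch', 1: 'sequence'}),
#              ('attention_mask', {0: 'batch', 1: 'sequence'})])
print(onnx_config.outputs)
# inherited: OrderedDict([('last_hidden_state', {0: 'batch', 1: 'sequence'})]) for task="default"

Marking axes 0 and 1 as dynamic lets a single exported graph serve any batch size and sequence length.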

src/transformers/models/big_bird/modeling_big_bird.py

@@ -3000,7 +3000,7 @@ class BigBirdForQuestionAnswering(BigBirdPreTrainedModel):
             # setting lengths logits to `-inf`
             logits_mask = self.prepare_question_mask(question_lengths, seqlen)
             if token_type_ids is None:
-                token_type_ids = (~logits_mask).long()
+                token_type_ids = torch.ones(logits_mask.size(), dtype=int) - logits_mask
             logits_mask = logits_mask
             logits_mask[:, 0] = False
             logits_mask.unsqueeze_(2)
@@ -3063,5 +3063,5 @@ class BigBirdForQuestionAnswering(BigBirdPreTrainedModel):
         # q_lengths -> (bz, 1)
         mask = torch.arange(0, maxlen).to(q_lengths.device)
         mask.unsqueeze_(0)  # -> (1, maxlen)
-        mask = mask < q_lengths
+        mask = torch.where(mask < q_lengths, 1, 0)
         return mask
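Both edits replace boolean-tensor ops with arithmetic on a 0/1 integer mask, presumably to keep the traced graph in ops the ONNX exporter handles reliably. Once prepare_question_mask returns integers, the old complement (~logits_mask).long() would also be incorrect, since bitwise NOT of 1 is -2. A runnable sketch of the equivalence (variable names are illustrative):

import torch

q_lengths = torch.tensor([[3], [5]])              # (bz, 1)
maxlen = 6
positions = torch.arange(0, maxlen).unsqueeze(0)  # (1, maxlen)

bool_mask = positions < q_lengths                    # old: torch.bool
int_mask = torch.where(positions < q_lengths, 1, 0)  # new: torch.int64

assert torch.equal(int_mask, bool_mask.long())
# The token_type_ids rewrite is the matching complement on the int mask:
assert torch.equal(torch.ones(int_mask.size(), dtype=int) - int_mask, (~bool_mask).long())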

src/transformers/onnx/features.py

@@ -6,6 +6,7 @@ from ..models.albert import AlbertOnnxConfig
 from ..models.bart import BartOnnxConfig
 from ..models.beit import BeitOnnxConfig
 from ..models.bert import BertOnnxConfig
+from ..models.big_bird import BigBirdOnnxConfig
 from ..models.blenderbot import BlenderbotOnnxConfig
 from ..models.blenderbot_small import BlenderbotSmallOnnxConfig
 from ..models.camembert import CamembertOnnxConfig
@@ -156,6 +157,15 @@ class FeaturesManager:
             "question-answering",
             onnx_config_cls=BertOnnxConfig,
         ),
+        "bigbird": supported_features_mapping(
+            "default",
+            "masked-lm",
+            "causal-lm",
+            "sequence-classification",
+            "token-classification",
+            "question-answering",
+            onnx_config_cls=BigBirdOnnxConfig,
+        ),
         "ibert": supported_features_mapping(
             "default",
             "masked-lm",

tests/test_onnx_v2.py

@@ -172,6 +172,7 @@ class OnnxConfigWithPastTestCaseV2(TestCase):
 PYTORCH_EXPORT_MODELS = {
     ("albert", "hf-internal-testing/tiny-albert"),
     ("bert", "bert-base-cased"),
+    ("bigbird", "google/bigbird-roberta-base"),
     ("ibert", "kssteven/ibert-roberta-base"),
     ("camembert", "camembert-base"),
     ("distilbert", "distilbert-base-cased"),