Removal of deprecated maps (#30576)

* [test_all] Remove all imports

Remove remaining ARCHIVE MAPS

Remove remaining PRETRAINED maps

* review comments

* [test_all] empty commit to trigger tests
Lysandre Debut 2024-05-09 14:15:56 +02:00 committed by GitHub
parent 8c5b3c19cf
commit 297b732bdf
723 changed files with 584 additions and 7268 deletions
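
The per-model hunks below all follow the same pattern: the deprecated *_PRETRAINED_CONFIG_ARCHIVE_MAP and *_PRETRAINED_MODEL_ARCHIVE_LIST names are dropped from each model's lazy _import_structure and TYPE_CHECKING imports in __init__.py, and the "from ..deprecated._archive_maps import ..." re-export lines are deleted from the configuration and modeling files. A minimal sketch of what that means for downstream code, assuming a transformers build that already includes this commit (BERT is only an illustrative example):

import transformers

# Config and model classes are unaffected by this change.
config = transformers.BertConfig()

# The archive-map constants are no longer exported, so legacy code that still
# reads them needs a guard such as this one.
try:
    checkpoints = transformers.BERT_PRETRAINED_MODEL_ARCHIVE_LIST
except (AttributeError, ImportError):
    checkpoints = []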

File diff suppressed because it is too large.


@ -26,7 +26,7 @@ from ...utils import (
_import_structure = {
"configuration_albert": ["ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "AlbertConfig", "AlbertOnnxConfig"],
"configuration_albert": ["AlbertConfig", "AlbertOnnxConfig"],
}
try:
@ -52,7 +52,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_albert"] = [
"ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"AlbertForMaskedLM",
"AlbertForMultipleChoice",
"AlbertForPreTraining",
@ -71,7 +70,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_albert"] = [
"TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFAlbertForMaskedLM",
"TFAlbertForMultipleChoice",
"TFAlbertForPreTraining",
@ -101,7 +99,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig, AlbertOnnxConfig
from .configuration_albert import AlbertConfig, AlbertOnnxConfig
try:
if not is_sentencepiece_available():
@ -126,7 +124,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_albert import (
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
AlbertForMaskedLM,
AlbertForMultipleChoice,
AlbertForPreTraining,
@ -145,7 +142,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_albert import (
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAlbertForMaskedLM,
TFAlbertForMultipleChoice,
TFAlbertForPreTraining,


@ -19,7 +19,6 @@ from typing import Mapping
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ..deprecated._archive_maps import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class AlbertConfig(PretrainedConfig):


@ -52,9 +52,6 @@ _CHECKPOINT_FOR_DOC = "albert/albert-base-v2"
_CONFIG_FOR_DOC = "AlbertConfig"
from ..deprecated._archive_maps import ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def load_tf_weights_in_albert(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:


@ -66,9 +66,6 @@ _CHECKPOINT_FOR_DOC = "albert/albert-base-v2"
_CONFIG_FOR_DOC = "AlbertConfig"
from ..deprecated._archive_maps import TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
class TFAlbertPreTrainingLoss:
"""
Loss function suitable for ALBERT pretraining, that is, the task of pretraining a language model by combining SOP +


@ -22,7 +22,6 @@ from ...utils import (
_import_structure = {
"configuration_align": [
"ALIGN_PRETRAINED_CONFIG_ARCHIVE_MAP",
"AlignConfig",
"AlignTextConfig",
"AlignVisionConfig",
@ -37,7 +36,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_align"] = [
"ALIGN_PRETRAINED_MODEL_ARCHIVE_LIST",
"AlignModel",
"AlignPreTrainedModel",
"AlignTextModel",
@ -46,7 +44,6 @@ else:
if TYPE_CHECKING:
from .configuration_align import (
ALIGN_PRETRAINED_CONFIG_ARCHIVE_MAP,
AlignConfig,
AlignTextConfig,
AlignVisionConfig,
@ -60,7 +57,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_align import (
ALIGN_PRETRAINED_MODEL_ARCHIVE_LIST,
AlignModel,
AlignPreTrainedModel,
AlignTextModel,


@ -28,9 +28,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import ALIGN_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class AlignTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`AlignTextModel`]. It is used to instantiate a


@ -47,9 +47,6 @@ _CHECKPOINT_FOR_DOC = "kakaobrain/align-base"
_CONFIG_FOR_DOC = "AlignConfig"
from ..deprecated._archive_maps import ALIGN_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
ALIGN_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_
_import_structure = {
"configuration_altclip": [
"ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"AltCLIPConfig",
"AltCLIPTextConfig",
"AltCLIPVisionConfig",
@ -33,7 +32,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_altclip"] = [
"ALTCLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"AltCLIPPreTrainedModel",
"AltCLIPModel",
"AltCLIPTextModel",
@ -43,7 +41,6 @@ else:
if TYPE_CHECKING:
from .configuration_altclip import (
ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP,
AltCLIPConfig,
AltCLIPTextConfig,
AltCLIPVisionConfig,
@ -57,7 +54,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_altclip import (
ALTCLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
AltCLIPModel,
AltCLIPPreTrainedModel,
AltCLIPTextModel,


@ -23,9 +23,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class AltCLIPTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`AltCLIPTextModel`]. It is used to instantiate a


@ -41,9 +41,6 @@ _CHECKPOINT_FOR_DOC = "BAAI/AltCLIP"
_CONFIG_FOR_DOC = "AltCLIPConfig"
from ..deprecated._archive_maps import ALTCLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
ALTCLIP_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads


@ -17,10 +17,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_audio_spectrogram_transformer": [
"AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
"ASTConfig",
],
"configuration_audio_spectrogram_transformer": ["ASTConfig"],
"feature_extraction_audio_spectrogram_transformer": ["ASTFeatureExtractor"],
}
@ -31,7 +28,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_audio_spectrogram_transformer"] = [
"AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
"ASTForAudioClassification",
"ASTModel",
"ASTPreTrainedModel",
@ -40,7 +36,6 @@ else:
if TYPE_CHECKING:
from .configuration_audio_spectrogram_transformer import (
AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
ASTConfig,
)
from .feature_extraction_audio_spectrogram_transformer import ASTFeatureExtractor
@ -52,7 +47,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_audio_spectrogram_transformer import (
AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
ASTForAudioClassification,
ASTModel,
ASTPreTrainedModel,


@ -22,9 +22,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ASTConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ASTModel`]. It is used to instantiate an AST


@ -45,9 +45,6 @@ _SEQ_CLASS_EXPECTED_OUTPUT = "'Speech'"
_SEQ_CLASS_EXPECTED_LOSS = 0.17
from ..deprecated._archive_maps import AUDIO_SPECTROGRAM_TRANSFORMER_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
class ASTEmbeddings(nn.Module):
"""
Construct the CLS token, position and patch embeddings.


@ -25,7 +25,7 @@ from ...utils import (
_import_structure = {
"auto_factory": ["get_values"],
"configuration_auto": ["ALL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CONFIG_MAPPING", "MODEL_NAMES_MAPPING", "AutoConfig"],
"configuration_auto": ["CONFIG_MAPPING", "MODEL_NAMES_MAPPING", "AutoConfig"],
"feature_extraction_auto": ["FEATURE_EXTRACTOR_MAPPING", "AutoFeatureExtractor"],
"image_processing_auto": ["IMAGE_PROCESSOR_MAPPING", "AutoImageProcessor"],
"processing_auto": ["PROCESSOR_MAPPING", "AutoProcessor"],
@ -213,7 +213,7 @@ else:
if TYPE_CHECKING:
from .auto_factory import get_values
from .configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP, CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
from .configuration_auto import CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
from .feature_extraction_auto import FEATURE_EXTRACTOR_MAPPING, AutoFeatureExtractor
from .image_processing_auto import IMAGE_PROCESSOR_MAPPING, AutoImageProcessor
from .processing_auto import PROCESSOR_MAPPING, AutoProcessor


@ -28,9 +28,6 @@ from ...utils import CONFIG_NAME, logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CONFIG_ARCHIVE_MAP_MAPPING_NAMES # noqa: F401, E402
CONFIG_MAPPING_NAMES = OrderedDict(
[
# Add configs here
@ -982,6 +979,3 @@ class AutoConfig:
"match!"
)
CONFIG_MAPPING.register(model_type, config, exist_ok=exist_ok)
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = _LazyLoadAllMappings(CONFIG_ARCHIVE_MAP_MAPPING_NAMES)
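
For context on the hunk above: ALL_PRETRAINED_CONFIG_ARCHIVE_MAP was a lazily materialized aggregate of every per-model *_PRETRAINED_CONFIG_ARCHIVE_MAP, built from the CONFIG_ARCHIVE_MAP_MAPPING_NAMES registry that is removed in the same file. A rough, self-contained sketch of the idea, not the removed _LazyLoadAllMappings implementation (the registry contents and module paths here are illustrative):

import importlib
from collections.abc import Mapping

class LazyAllMappings(Mapping):
    """Merge per-model archive maps on first access instead of at import time."""

    def __init__(self, registry):
        # registry maps model_type -> archive-map attribute name,
        # e.g. {"bert": "BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"}
        self._registry = registry
        self._data = None

    def _load(self):
        if self._data is None:
            self._data = {}
            for model_type, attr_name in self._registry.items():
                module = importlib.import_module(f"transformers.models.{model_type}")
                self._data.update(getattr(module, attr_name, {}))
        return self._data

    def __getitem__(self, key):
        return self._load()[key]

    def __iter__(self):
        return iter(self._load())

    def __len__(self):
        return len(self._load())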


@ -18,10 +18,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_autoformer": [
"AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
"AutoformerConfig",
],
"configuration_autoformer": ["AutoformerConfig"],
}
try:
@ -31,7 +28,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_autoformer"] = [
"AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
"AutoformerForPrediction",
"AutoformerModel",
"AutoformerPreTrainedModel",
@ -40,7 +36,6 @@ else:
if TYPE_CHECKING:
from .configuration_autoformer import (
AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
AutoformerConfig,
)
@ -51,7 +46,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_autoformer import (
AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
AutoformerForPrediction,
AutoformerModel,
AutoformerPreTrainedModel,


@ -23,9 +23,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class AutoformerConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of an [`AutoformerModel`]. It is used to instantiate an


@ -167,9 +167,6 @@ class AutoformerModelOutput(ModelOutput):
static_features: Optional[torch.FloatTensor] = None
from ..deprecated._archive_maps import AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.time_series_transformer.modeling_time_series_transformer.TimeSeriesFeatureEmbedder with TimeSeries->Autoformer
class AutoformerFeatureEmbedder(nn.Module):
"""


@ -22,7 +22,6 @@ from ...utils import (
_import_structure = {
"configuration_bark": [
"BARK_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BarkCoarseConfig",
"BarkConfig",
"BarkFineConfig",
@ -38,7 +37,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bark"] = [
"BARK_PRETRAINED_MODEL_ARCHIVE_LIST",
"BarkFineModel",
"BarkSemanticModel",
"BarkCoarseModel",
@ -49,7 +47,6 @@ else:
if TYPE_CHECKING:
from .configuration_bark import (
BARK_PRETRAINED_CONFIG_ARCHIVE_MAP,
BarkCoarseConfig,
BarkConfig,
BarkFineConfig,
@ -64,7 +61,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bark import (
BARK_PRETRAINED_MODEL_ARCHIVE_LIST,
BarkCausalModel,
BarkCoarseModel,
BarkFineModel,


@ -64,9 +64,6 @@ _CHECKPOINT_FOR_DOC = "suno/bark-small"
_CONFIG_FOR_DOC = "BarkConfig"
from ..deprecated._archive_maps import BARK_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.llama.modeling_llama._get_unpad_data
def _get_unpad_data(attention_mask):
seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)


@ -24,7 +24,7 @@ from ...utils import (
_import_structure = {
"configuration_bart": ["BART_PRETRAINED_CONFIG_ARCHIVE_MAP", "BartConfig", "BartOnnxConfig"],
"configuration_bart": ["BartConfig", "BartOnnxConfig"],
"tokenization_bart": ["BartTokenizer"],
}
@ -43,7 +43,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bart"] = [
"BART_PRETRAINED_MODEL_ARCHIVE_LIST",
"BartForCausalLM",
"BartForConditionalGeneration",
"BartForQuestionAnswering",
@ -84,7 +83,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_bart import BART_PRETRAINED_CONFIG_ARCHIVE_MAP, BartConfig, BartOnnxConfig
from .configuration_bart import BartConfig, BartOnnxConfig
from .tokenization_bart import BartTokenizer
try:
@ -102,7 +101,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bart import (
BART_PRETRAINED_MODEL_ARCHIVE_LIST,
BartForCausalLM,
BartForConditionalGeneration,
BartForQuestionAnswering,


@ -78,9 +78,6 @@ _QA_EXPECTED_LOSS = 0.59
_QA_EXPECTED_OUTPUT = "' nice puppet'"
from ..deprecated._archive_maps import BART_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.llama.modeling_llama._get_unpad_data
def _get_unpad_data(attention_mask):
seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)


@ -23,7 +23,7 @@ from ...utils import (
)
_import_structure = {"configuration_beit": ["BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BeitConfig", "BeitOnnxConfig"]}
_import_structure = {"configuration_beit": ["BeitConfig", "BeitOnnxConfig"]}
try:
if not is_vision_available():
@ -41,7 +41,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_beit"] = [
"BEIT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BeitForImageClassification",
"BeitForMaskedImageModeling",
"BeitForSemanticSegmentation",
@ -65,7 +64,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_beit import BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP, BeitConfig, BeitOnnxConfig
from .configuration_beit import BeitConfig, BeitOnnxConfig
try:
if not is_vision_available():
@ -83,7 +82,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_beit import (
BEIT_PRETRAINED_MODEL_ARCHIVE_LIST,
BeitBackbone,
BeitForImageClassification,
BeitForMaskedImageModeling,


@ -27,9 +27,6 @@ from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_feat
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BeitConfig(BackboneConfigMixin, PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BeitModel`]. It is used to instantiate an BEiT


@ -61,9 +61,6 @@ _IMAGE_CLASS_CHECKPOINT = "microsoft/beit-base-patch16-224"
_IMAGE_CLASS_EXPECTED_OUTPUT = "tabby, tabby cat"
from ..deprecated._archive_maps import BEIT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
@dataclass
class BeitModelOutputWithPooling(BaseModelOutputWithPooling):
"""


@ -26,7 +26,7 @@ from ...utils import (
_import_structure = {
"configuration_bert": ["BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BertConfig", "BertOnnxConfig"],
"configuration_bert": ["BertConfig", "BertOnnxConfig"],
"tokenization_bert": ["BasicTokenizer", "BertTokenizer", "WordpieceTokenizer"],
}
@ -45,7 +45,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bert"] = [
"BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BertForMaskedLM",
"BertForMultipleChoice",
"BertForNextSentencePrediction",
@ -67,7 +66,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_bert"] = [
"TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFBertEmbeddings",
"TFBertForMaskedLM",
"TFBertForMultipleChoice",
@ -109,7 +107,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig, BertOnnxConfig
from .configuration_bert import BertConfig, BertOnnxConfig
from .tokenization_bert import BasicTokenizer, BertTokenizer, WordpieceTokenizer
try:
@ -127,7 +125,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bert import (
BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
BertForMaskedLM,
BertForMultipleChoice,
BertForNextSentencePrediction,
@ -149,7 +146,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_bert import (
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFBertEmbeddings,
TFBertForMaskedLM,
TFBertForMultipleChoice,


@ -25,9 +25,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BertConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BertModel`] or a [`TFBertModel`]. It is used to


@ -82,9 +82,6 @@ _SEQ_CLASS_EXPECTED_OUTPUT = "'LABEL_1'"
_SEQ_CLASS_EXPECTED_LOSS = 0.01
from ..deprecated._archive_maps import BERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def load_tf_weights_in_bert(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:


@ -90,9 +90,6 @@ _SEQ_CLASS_EXPECTED_OUTPUT = "'LABEL_1'"
_SEQ_CLASS_EXPECTED_LOSS = 0.01
from ..deprecated._archive_maps import TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
class TFBertPreTrainingLoss:
"""
Loss function suitable for BERT-like pretraining, that is, the task of pretraining a language model by combining


@ -25,7 +25,7 @@ from ...utils import (
_import_structure = {
"configuration_big_bird": ["BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP", "BigBirdConfig", "BigBirdOnnxConfig"],
"configuration_big_bird": ["BigBirdConfig", "BigBirdOnnxConfig"],
}
try:
@ -51,7 +51,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_big_bird"] = [
"BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST",
"BigBirdForCausalLM",
"BigBirdForMaskedLM",
"BigBirdForMultipleChoice",
@ -84,7 +83,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_big_bird import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP, BigBirdConfig, BigBirdOnnxConfig
from .configuration_big_bird import BigBirdConfig, BigBirdOnnxConfig
try:
if not is_sentencepiece_available():
@ -109,7 +108,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_big_bird import (
BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST,
BigBirdForCausalLM,
BigBirdForMaskedLM,
BigBirdForMultipleChoice,


@ -24,9 +24,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BigBirdConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BigBirdModel`]. It is used to instantiate an


@ -55,9 +55,6 @@ _CHECKPOINT_FOR_DOC = "google/bigbird-roberta-base"
_CONFIG_FOR_DOC = "BigBirdConfig"
from ..deprecated._archive_maps import BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
_TRIVIA_QA_MAPPING = {
"big_bird_attention": "attention/self",
"output_layer_norm": "output/LayerNorm",


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_bigbird_pegasus": [
"BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BigBirdPegasusConfig",
"BigBirdPegasusOnnxConfig",
],
@ -31,7 +30,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bigbird_pegasus"] = [
"BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST",
"BigBirdPegasusForCausalLM",
"BigBirdPegasusForConditionalGeneration",
"BigBirdPegasusForQuestionAnswering",
@ -43,7 +41,6 @@ else:
if TYPE_CHECKING:
from .configuration_bigbird_pegasus import (
BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP,
BigBirdPegasusConfig,
BigBirdPegasusOnnxConfig,
)
@ -55,7 +52,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bigbird_pegasus import (
BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST,
BigBirdPegasusForCausalLM,
BigBirdPegasusForConditionalGeneration,
BigBirdPegasusForQuestionAnswering,


@ -27,9 +27,6 @@ from ...utils import TensorType, is_torch_available, logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BigBirdPegasusConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BigBirdPegasusModel`]. It is used to instantiate


@ -54,9 +54,6 @@ _CONFIG_FOR_DOC = "BigBirdPegasusConfig"
_EXPECTED_OUTPUT_SHAPE = [1, 7, 1024]
from ..deprecated._archive_maps import BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""
Shift input ids one token to the right.


@ -17,7 +17,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_
_import_structure = {
"configuration_biogpt": ["BIOGPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BioGptConfig"],
"configuration_biogpt": ["BioGptConfig"],
"tokenization_biogpt": ["BioGptTokenizer"],
}
@ -28,7 +28,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_biogpt"] = [
"BIOGPT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BioGptForCausalLM",
"BioGptForTokenClassification",
"BioGptForSequenceClassification",
@ -38,7 +37,7 @@ else:
if TYPE_CHECKING:
from .configuration_biogpt import BIOGPT_PRETRAINED_CONFIG_ARCHIVE_MAP, BioGptConfig
from .configuration_biogpt import BioGptConfig
from .tokenization_biogpt import BioGptTokenizer
try:
@ -48,7 +47,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_biogpt import (
BIOGPT_PRETRAINED_MODEL_ARCHIVE_LIST,
BioGptForCausalLM,
BioGptForSequenceClassification,
BioGptForTokenClassification,


@ -21,9 +21,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BIOGPT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BioGptConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BioGptModel`]. It is used to instantiate an


@ -47,9 +47,6 @@ _CHECKPOINT_FOR_DOC = "microsoft/biogpt"
_CONFIG_FOR_DOC = "BioGptConfig"
from ..deprecated._archive_maps import BIOGPT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.opt.modeling_opt.OPTLearnedPositionalEmbedding with OPT->BioGpt
class BioGptLearnedPositionalEmbedding(nn.Embedding):
"""


@ -16,7 +16,7 @@ from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available
_import_structure = {"configuration_bit": ["BIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BitConfig", "BitOnnxConfig"]}
_import_structure = {"configuration_bit": ["BitConfig", "BitOnnxConfig"]}
try:
if not is_torch_available():
@ -25,7 +25,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bit"] = [
"BIT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BitForImageClassification",
"BitModel",
"BitPreTrainedModel",
@ -43,7 +42,7 @@ else:
if TYPE_CHECKING:
from .configuration_bit import BIT_PRETRAINED_CONFIG_ARCHIVE_MAP, BitConfig, BitOnnxConfig
from .configuration_bit import BitConfig, BitOnnxConfig
try:
if not is_torch_available():
@ -52,7 +51,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bit import (
BIT_PRETRAINED_MODEL_ARCHIVE_LIST,
BitBackbone,
BitForImageClassification,
BitModel,


@ -22,9 +22,6 @@ from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_feat
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BIT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BitConfig(BackboneConfigMixin, PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BitModel`]. It is used to instantiate an BiT


@ -57,9 +57,6 @@ _IMAGE_CLASS_CHECKPOINT = "google/bit-50"
_IMAGE_CLASS_EXPECTED_OUTPUT = "tiger cat"
from ..deprecated._archive_maps import BIT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def get_padding_value(padding=None, kernel_size=7, stride=1, dilation=1) -> Tuple[Tuple, bool]:
r"""
Utility function to get the tuple padding value given the kernel_size and padding.


@ -26,7 +26,6 @@ from ...utils import (
_import_structure = {
"configuration_blenderbot": [
"BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BlenderbotConfig",
"BlenderbotOnnxConfig",
],
@ -48,7 +47,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_blenderbot"] = [
"BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST",
"BlenderbotForCausalLM",
"BlenderbotForConditionalGeneration",
"BlenderbotModel",
@ -84,7 +82,6 @@ else:
if TYPE_CHECKING:
from .configuration_blenderbot import (
BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP,
BlenderbotConfig,
BlenderbotOnnxConfig,
)
@ -105,7 +102,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_blenderbot import (
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotForCausalLM,
BlenderbotForConditionalGeneration,
BlenderbotModel,


@ -28,9 +28,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BlenderbotConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BlenderbotModel`]. It is used to instantiate an


@ -53,9 +53,6 @@ _CONFIG_FOR_DOC = "BlenderbotConfig"
_CHECKPOINT_FOR_DOC = "facebook/blenderbot-400M-distill"
from ..deprecated._archive_maps import BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.bart.modeling_bart.shift_tokens_right
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""


@ -25,7 +25,6 @@ from ...utils import (
_import_structure = {
"configuration_blenderbot_small": [
"BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BlenderbotSmallConfig",
"BlenderbotSmallOnnxConfig",
],
@ -47,7 +46,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_blenderbot_small"] = [
"BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
"BlenderbotSmallForCausalLM",
"BlenderbotSmallForConditionalGeneration",
"BlenderbotSmallModel",
@ -80,7 +78,6 @@ else:
if TYPE_CHECKING:
from .configuration_blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP,
BlenderbotSmallConfig,
BlenderbotSmallOnnxConfig,
)
@ -101,7 +98,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotSmallForCausalLM,
BlenderbotSmallForConditionalGeneration,
BlenderbotSmallModel,


@ -27,8 +27,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BlenderbotSmallConfig(PretrainedConfig):
r"""


@ -49,9 +49,6 @@ logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "BlenderbotSmallConfig"
from ..deprecated._archive_maps import BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.bart.modeling_bart.shift_tokens_right
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""


@ -24,7 +24,6 @@ from ...utils import (
_import_structure = {
"configuration_blip": [
"BLIP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BlipConfig",
"BlipTextConfig",
"BlipVisionConfig",
@ -48,7 +47,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_blip"] = [
"BLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"BlipModel",
"BlipPreTrainedModel",
"BlipForConditionalGeneration",
@ -65,7 +63,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_blip"] = [
"TF_BLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFBlipModel",
"TFBlipPreTrainedModel",
"TFBlipForConditionalGeneration",
@ -76,7 +73,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_blip import BLIP_PRETRAINED_CONFIG_ARCHIVE_MAP, BlipConfig, BlipTextConfig, BlipVisionConfig
from .configuration_blip import BlipConfig, BlipTextConfig, BlipVisionConfig
from .processing_blip import BlipProcessor
try:
@ -94,7 +91,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_blip import (
BLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
BlipForConditionalGeneration,
BlipForImageTextRetrieval,
BlipForQuestionAnswering,
@ -111,7 +107,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_blip import (
TF_BLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
TFBlipForConditionalGeneration,
TFBlipForImageTextRetrieval,
TFBlipForQuestionAnswering,


@ -24,9 +24,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BLIP_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BlipTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BlipTextModel`]. It is used to instantiate a BLIP


@ -42,9 +42,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "Salesforce/blip-vqa-base"
from ..deprecated._archive_maps import BLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.clip.modeling_clip.contrastive_loss
def contrastive_loss(logits: torch.Tensor) -> torch.Tensor:
return nn.functional.cross_entropy(logits, torch.arange(len(logits), device=logits.device))


@ -49,9 +49,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "Salesforce/blip-vqa-base"
from ..deprecated._archive_maps import TF_BLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.clip.modeling_tf_clip.contrastive_loss
def contrastive_loss(logits: tf.Tensor) -> tf.Tensor:
return tf.math.reduce_mean(


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_blip_2": [
"BLIP_2_PRETRAINED_CONFIG_ARCHIVE_MAP",
"Blip2Config",
"Blip2QFormerConfig",
"Blip2VisionConfig",
@ -33,7 +32,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_blip_2"] = [
"BLIP_2_PRETRAINED_MODEL_ARCHIVE_LIST",
"Blip2Model",
"Blip2QFormerModel",
"Blip2PreTrainedModel",
@ -43,7 +41,6 @@ else:
if TYPE_CHECKING:
from .configuration_blip_2 import (
BLIP_2_PRETRAINED_CONFIG_ARCHIVE_MAP,
Blip2Config,
Blip2QFormerConfig,
Blip2VisionConfig,
@ -57,7 +54,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_blip_2 import (
BLIP_2_PRETRAINED_MODEL_ARCHIVE_LIST,
Blip2ForConditionalGeneration,
Blip2Model,
Blip2PreTrainedModel,


@ -26,9 +26,6 @@ from ..auto import CONFIG_MAPPING
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BLIP_2_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class Blip2VisionConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`Blip2VisionModel`]. It is used to instantiate a


@ -48,9 +48,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "Salesforce/blip2-opt-2.7b"
from ..deprecated._archive_maps import BLIP_2_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
@dataclass
class Blip2ForConditionalGenerationModelOutput(ModelOutput):
"""


@ -24,7 +24,7 @@ from ...utils import (
_import_structure = {
"configuration_bloom": ["BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP", "BloomConfig", "BloomOnnxConfig"],
"configuration_bloom": ["BloomConfig", "BloomOnnxConfig"],
}
try:
if not is_tokenizers_available():
@ -41,7 +41,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bloom"] = [
"BLOOM_PRETRAINED_MODEL_ARCHIVE_LIST",
"BloomForCausalLM",
"BloomModel",
"BloomPreTrainedModel",
@ -64,7 +63,7 @@ else:
if TYPE_CHECKING:
from .configuration_bloom import BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP, BloomConfig, BloomOnnxConfig
from .configuration_bloom import BloomConfig, BloomOnnxConfig
try:
if not is_tokenizers_available():
@ -81,7 +80,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bloom import (
BLOOM_PRETRAINED_MODEL_ARCHIVE_LIST,
BloomForCausalLM,
BloomForQuestionAnswering,
BloomForSequenceClassification,


@ -30,9 +30,6 @@ from ...utils import is_torch_available, logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BloomConfig(PretrainedConfig):
"""
This is the configuration class to store the configuration of a [`BloomModel`]. It is used to instantiate a Bloom


@ -44,9 +44,6 @@ _CHECKPOINT_FOR_DOC = "bigscience/bloom-560m"
_CONFIG_FOR_DOC = "BloomConfig"
from ..deprecated._archive_maps import BLOOM_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def build_alibi_tensor(attention_mask: torch.Tensor, num_heads: int, dtype: torch.dtype) -> torch.Tensor:
"""
Link to paper: https://arxiv.org/abs/2108.12409 Alibi tensor is not causal as the original paper mentions, it


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_bridgetower": [
"BRIDGETOWER_PRETRAINED_CONFIG_ARCHIVE_MAP",
"BridgeTowerConfig",
"BridgeTowerTextConfig",
"BridgeTowerVisionConfig",
@ -41,7 +40,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bridgetower"] = [
"BRIDGETOWER_PRETRAINED_MODEL_ARCHIVE_LIST",
"BridgeTowerForContrastiveLearning",
"BridgeTowerForImageAndTextRetrieval",
"BridgeTowerForMaskedLM",
@ -52,7 +50,6 @@ else:
if TYPE_CHECKING:
from .configuration_bridgetower import (
BRIDGETOWER_PRETRAINED_CONFIG_ARCHIVE_MAP,
BridgeTowerConfig,
BridgeTowerTextConfig,
BridgeTowerVisionConfig,
@ -74,7 +71,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bridgetower import (
BRIDGETOWER_PRETRAINED_MODEL_ARCHIVE_LIST,
BridgeTowerForContrastiveLearning,
BridgeTowerForImageAndTextRetrieval,
BridgeTowerForMaskedLM,


@ -24,9 +24,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BRIDGETOWER_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BridgeTowerVisionConfig(PretrainedConfig):
r"""
This is the configuration class to store the vision configuration of a [`BridgeTowerModel`]. Instantiating a


@ -45,9 +45,6 @@ _CHECKPOINT_FOR_DOC = "BridgeTower/bridgetower-base"
_TOKENIZER_FOR_DOC = "RobertaTokenizer"
from ..deprecated._archive_maps import BRIDGETOWER_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
BRIDGETOWER_START_DOCSTRING = r"""
This model is a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`_ subclass. Use
it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and


@ -17,7 +17,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_
_import_structure = {
"configuration_bros": ["BROS_PRETRAINED_CONFIG_ARCHIVE_MAP", "BrosConfig"],
"configuration_bros": ["BrosConfig"],
}
try:
@ -35,7 +35,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_bros"] = [
"BROS_PRETRAINED_MODEL_ARCHIVE_LIST",
"BrosPreTrainedModel",
"BrosModel",
"BrosForTokenClassification",
@ -45,7 +44,7 @@ else:
if TYPE_CHECKING:
from .configuration_bros import BROS_PRETRAINED_CONFIG_ARCHIVE_MAP, BrosConfig
from .configuration_bros import BrosConfig
try:
if not is_tokenizers_available():
@ -62,7 +61,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_bros import (
BROS_PRETRAINED_MODEL_ARCHIVE_LIST,
BrosForTokenClassification,
BrosModel,
BrosPreTrainedModel,


@ -21,9 +21,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import BROS_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class BrosConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`BrosModel`] or a [`TFBrosModel`]. It is used to


@ -48,9 +48,6 @@ _CHECKPOINT_FOR_DOC = "jinho8345/bros-base-uncased"
_CONFIG_FOR_DOC = "BrosConfig"
from ..deprecated._archive_maps import BROS_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
BROS_START_DOCSTRING = r"""
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage


@ -25,7 +25,7 @@ from ...utils import (
_import_structure = {
"configuration_camembert": ["CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "CamembertConfig", "CamembertOnnxConfig"],
"configuration_camembert": ["CamembertConfig", "CamembertOnnxConfig"],
}
try:
@ -51,7 +51,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_camembert"] = [
"CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"CamembertForCausalLM",
"CamembertForMaskedLM",
"CamembertForMultipleChoice",
@ -69,7 +68,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_camembert"] = [
"TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFCamembertForCausalLM",
"TFCamembertForMaskedLM",
"TFCamembertForMultipleChoice",
@ -82,7 +80,7 @@ else:
if TYPE_CHECKING:
from .configuration_camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig, CamembertOnnxConfig
from .configuration_camembert import CamembertConfig, CamembertOnnxConfig
try:
if not is_sentencepiece_available():
@ -107,7 +105,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_camembert import (
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
CamembertForCausalLM,
CamembertForMaskedLM,
CamembertForMultipleChoice,
@ -125,7 +122,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_camembert import (
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCamembertForCausalLM,
TFCamembertForMaskedLM,
TFCamembertForMultipleChoice,


@ -26,9 +26,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class CamembertConfig(PretrainedConfig):
"""
This is the configuration class to store the configuration of a [`CamembertModel`] or a [`TFCamembertModel`]. It is


@ -52,9 +52,6 @@ _CHECKPOINT_FOR_DOC = "almanach/camembert-base"
_CONFIG_FOR_DOC = "CamembertConfig"
from ..deprecated._archive_maps import CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
CAMEMBERT_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the


@ -66,9 +66,6 @@ _CHECKPOINT_FOR_DOC = "almanach/camembert-base"
_CONFIG_FOR_DOC = "CamembertConfig"
from ..deprecated._archive_maps import TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
CAMEMBERT_START_DOCSTRING = r"""
This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the


@ -17,7 +17,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_
_import_structure = {
"configuration_canine": ["CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP", "CanineConfig"],
"configuration_canine": ["CanineConfig"],
"tokenization_canine": ["CanineTokenizer"],
}
@ -28,7 +28,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_canine"] = [
"CANINE_PRETRAINED_MODEL_ARCHIVE_LIST",
"CanineForMultipleChoice",
"CanineForQuestionAnswering",
"CanineForSequenceClassification",
@ -41,7 +40,7 @@ else:
if TYPE_CHECKING:
from .configuration_canine import CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP, CanineConfig
from .configuration_canine import CanineConfig
from .tokenization_canine import CanineTokenizer
try:
@ -51,7 +50,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_canine import (
CANINE_PRETRAINED_MODEL_ARCHIVE_LIST,
CanineForMultipleChoice,
CanineForQuestionAnswering,
CanineForSequenceClassification,


@ -21,9 +21,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class CanineConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`CanineModel`]. It is used to instantiate an


@ -53,9 +53,6 @@ _CHECKPOINT_FOR_DOC = "google/canine-s"
_CONFIG_FOR_DOC = "CanineConfig"
from ..deprecated._archive_maps import CANINE_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Support up to 16 hash functions.
_PRIMES = [31, 43, 59, 61, 73, 97, 103, 113, 137, 149, 157, 173, 181, 193, 211, 223]


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_chinese_clip": [
"CHINESE_CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"ChineseCLIPConfig",
"ChineseCLIPOnnxConfig",
"ChineseCLIPTextConfig",
@ -43,7 +42,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_chinese_clip"] = [
"CHINESE_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"ChineseCLIPModel",
"ChineseCLIPPreTrainedModel",
"ChineseCLIPTextModel",
@ -52,7 +50,6 @@ else:
if TYPE_CHECKING:
from .configuration_chinese_clip import (
CHINESE_CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP,
ChineseCLIPConfig,
ChineseCLIPOnnxConfig,
ChineseCLIPTextConfig,
@ -75,7 +72,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_chinese_clip import (
CHINESE_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
ChineseCLIPModel,
ChineseCLIPPreTrainedModel,
ChineseCLIPTextModel,


@ -31,9 +31,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CHINESE_CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ChineseCLIPTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ChineseCLIPModel`]. It is used to instantiate a


@ -49,9 +49,6 @@ _CHECKPOINT_FOR_DOC = "OFA-Sys/chinese-clip-vit-base-patch16"
_CONFIG_FOR_DOC = "ChineseCLIPConfig"
from ..deprecated._archive_maps import CHINESE_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# https://sachinruk.github.io/blog/pytorch/pytorch%20lightning/loss%20function/gpu/2021/03/07/CLIP.html
# Copied from transformers.models.clip.modeling_clip.contrastive_loss
def contrastive_loss(logits: torch.Tensor) -> torch.Tensor:


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_clap": [
"CLAP_PRETRAINED_MODEL_ARCHIVE_LIST",
"ClapAudioConfig",
"ClapConfig",
"ClapTextConfig",
@ -33,7 +32,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_clap"] = [
"CLAP_PRETRAINED_MODEL_ARCHIVE_LIST",
"ClapModel",
"ClapPreTrainedModel",
"ClapTextModel",
@ -45,7 +43,6 @@ else:
if TYPE_CHECKING:
from .configuration_clap import (
CLAP_PRETRAINED_MODEL_ARCHIVE_LIST,
ClapAudioConfig,
ClapConfig,
ClapTextConfig,
@ -60,7 +57,6 @@ if TYPE_CHECKING:
else:
from .feature_extraction_clap import ClapFeatureExtractor
from .modeling_clap import (
CLAP_PRETRAINED_MODEL_ARCHIVE_LIST,
ClapAudioModel,
ClapAudioModelWithProjection,
ClapModel,


@ -45,9 +45,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "laion/clap-htsat-fused"
from ..deprecated._archive_maps import CLAP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Adapted from: https://github.com/LAION-AI/CLAP/blob/6ad05a971ba0622f6acee8c41993e0d02bbed639/src/open_clip/utils.py#L191
def interpolate(hidden_states, ratio):
"""


@ -26,7 +26,6 @@ from ...utils import (
_import_structure = {
"configuration_clip": [
"CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"CLIPConfig",
"CLIPOnnxConfig",
"CLIPTextConfig",
@ -60,7 +59,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_clip"] = [
"CLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"CLIPModel",
"CLIPPreTrainedModel",
"CLIPTextModel",
@ -77,7 +75,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_clip"] = [
"TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFCLIPModel",
"TFCLIPPreTrainedModel",
"TFCLIPTextModel",
@ -103,7 +100,6 @@ else:
if TYPE_CHECKING:
from .configuration_clip import (
CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP,
CLIPConfig,
CLIPOnnxConfig,
CLIPTextConfig,
@ -136,7 +132,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_clip import (
CLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
CLIPForImageClassification,
CLIPModel,
CLIPPreTrainedModel,
@ -153,7 +148,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_clip import (
TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCLIPModel,
TFCLIPPreTrainedModel,
TFCLIPTextModel,


@ -31,9 +31,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class CLIPTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`CLIPTextModel`]. It is used to instantiate a CLIP


@ -49,9 +49,6 @@ _IMAGE_CLASS_CHECKPOINT = "openai/clip-vit-base-patch32"
_IMAGE_CLASS_EXPECTED_OUTPUT = "LABEL_0"
from ..deprecated._archive_maps import CLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# contrastive loss function, adapted from
# https://sachinruk.github.io/blog/2021-03-07-clip.html
def contrastive_loss(logits: torch.Tensor) -> torch.Tensor:


@ -52,9 +52,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "openai/clip-vit-base-patch32"
from ..deprecated._archive_maps import TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
LARGE_NEGATIVE = -1e8


@ -18,7 +18,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_clipseg": [
"CLIPSEG_PRETRAINED_CONFIG_ARCHIVE_MAP",
"CLIPSegConfig",
"CLIPSegTextConfig",
"CLIPSegVisionConfig",
@ -33,7 +32,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_clipseg"] = [
"CLIPSEG_PRETRAINED_MODEL_ARCHIVE_LIST",
"CLIPSegModel",
"CLIPSegPreTrainedModel",
"CLIPSegTextModel",
@ -43,7 +41,6 @@ else:
if TYPE_CHECKING:
from .configuration_clipseg import (
CLIPSEG_PRETRAINED_CONFIG_ARCHIVE_MAP,
CLIPSegConfig,
CLIPSegTextConfig,
CLIPSegVisionConfig,
@ -57,7 +54,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_clipseg import (
CLIPSEG_PRETRAINED_MODEL_ARCHIVE_LIST,
CLIPSegForImageSegmentation,
CLIPSegModel,
CLIPSegPreTrainedModel,


@ -24,9 +24,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CLIPSEG_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class CLIPSegTextConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`CLIPSegModel`]. It is used to instantiate an


@ -43,9 +43,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "CIDAS/clipseg-rd64-refined"
from ..deprecated._archive_maps import CLIPSEG_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# contrastive loss function, adapted from
# https://sachinruk.github.io/blog/pytorch/pytorch%20lightning/loss%20function/gpu/2021/03/07/CLIP.html
def contrastive_loss(logits: torch.Tensor) -> torch.Tensor:


@ -22,7 +22,6 @@ from ...utils import (
_import_structure = {
"configuration_clvp": [
"CLVP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"ClvpConfig",
"ClvpDecoderConfig",
"ClvpEncoderConfig",
@ -40,7 +39,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_clvp"] = [
"CLVP_PRETRAINED_MODEL_ARCHIVE_LIST",
"ClvpModelForConditionalGeneration",
"ClvpForCausalLM",
"ClvpModel",
@ -52,7 +50,6 @@ else:
if TYPE_CHECKING:
from .configuration_clvp import (
CLVP_PRETRAINED_CONFIG_ARCHIVE_MAP,
ClvpConfig,
ClvpDecoderConfig,
ClvpEncoderConfig,
@ -68,7 +65,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_clvp import (
CLVP_PRETRAINED_MODEL_ARCHIVE_LIST,
ClvpDecoder,
ClvpEncoder,
ClvpForCausalLM,


@ -29,9 +29,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CLVP_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ClvpEncoderConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ClvpEncoder`]. It is used to instantiate a CLVP


@ -56,9 +56,6 @@ logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "susnato/clvp_dev"
from ..deprecated._archive_maps import CLVP_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.clip.modeling_clip.contrastive_loss
def contrastive_loss(logits: torch.Tensor) -> torch.Tensor:
return nn.functional.cross_entropy(logits, torch.arange(len(logits), device=logits.device))


@ -17,7 +17,7 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_
_import_structure = {
"configuration_codegen": ["CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP", "CodeGenConfig", "CodeGenOnnxConfig"],
"configuration_codegen": ["CodeGenConfig", "CodeGenOnnxConfig"],
"tokenization_codegen": ["CodeGenTokenizer"],
}
@ -36,14 +36,13 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_codegen"] = [
"CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST",
"CodeGenForCausalLM",
"CodeGenModel",
"CodeGenPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_codegen import CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP, CodeGenConfig, CodeGenOnnxConfig
from .configuration_codegen import CodeGenConfig, CodeGenOnnxConfig
from .tokenization_codegen import CodeGenTokenizer
try:
@ -61,7 +60,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_codegen import (
CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST,
CodeGenForCausalLM,
CodeGenModel,
CodeGenPreTrainedModel,


@ -25,9 +25,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class CodeGenConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`CodeGenModel`]. It is used to instantiate a


@ -34,9 +34,6 @@ _CHECKPOINT_FOR_DOC = "Salesforce/codegen-2B-mono"
_CONFIG_FOR_DOC = "CodeGenConfig"
from ..deprecated._archive_maps import CODEGEN_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.gptj.modeling_gptj.create_sinusoidal_positions
def create_sinusoidal_positions(num_pos: int, dim: int) -> torch.Tensor:
inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2, dtype=torch.int64) / dim))


@ -23,7 +23,7 @@ from ...utils import (
_import_structure = {
"configuration_cohere": ["COHERE_PRETRAINED_CONFIG_ARCHIVE_MAP", "CohereConfig"],
"configuration_cohere": ["CohereConfig"],
}
@ -49,7 +49,7 @@ else:
if TYPE_CHECKING:
from .configuration_cohere import COHERE_PRETRAINED_CONFIG_ARCHIVE_MAP, CohereConfig
from .configuration_cohere import CohereConfig
try:
if not is_tokenizers_available():


@ -25,8 +25,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
COHERE_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
class CohereConfig(PretrainedConfig):
r"""


@ -19,7 +19,6 @@ from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_avail
_import_structure = {
"configuration_conditional_detr": [
"CONDITIONAL_DETR_PRETRAINED_CONFIG_ARCHIVE_MAP",
"ConditionalDetrConfig",
"ConditionalDetrOnnxConfig",
]
@ -41,7 +40,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_conditional_detr"] = [
"CONDITIONAL_DETR_PRETRAINED_MODEL_ARCHIVE_LIST",
"ConditionalDetrForObjectDetection",
"ConditionalDetrForSegmentation",
"ConditionalDetrModel",
@ -51,7 +49,6 @@ else:
if TYPE_CHECKING:
from .configuration_conditional_detr import (
CONDITIONAL_DETR_PRETRAINED_CONFIG_ARCHIVE_MAP,
ConditionalDetrConfig,
ConditionalDetrOnnxConfig,
)
@ -72,7 +69,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_conditional_detr import (
CONDITIONAL_DETR_PRETRAINED_MODEL_ARCHIVE_LIST,
ConditionalDetrForObjectDetection,
ConditionalDetrForSegmentation,
ConditionalDetrModel,


@ -27,9 +27,6 @@ from ..auto import CONFIG_MAPPING
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CONDITIONAL_DETR_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ConditionalDetrConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ConditionalDetrModel`]. It is used to instantiate


@ -61,9 +61,6 @@ _CONFIG_FOR_DOC = "ConditionalDetrConfig"
_CHECKPOINT_FOR_DOC = "microsoft/conditional-detr-resnet-50"
from ..deprecated._archive_maps import CONDITIONAL_DETR_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
@dataclass
class ConditionalDetrDecoderOutput(BaseModelOutputWithCrossAttentions):
"""


@ -23,7 +23,7 @@ from ...utils import (
_import_structure = {
"configuration_convbert": ["CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvBertConfig", "ConvBertOnnxConfig"],
"configuration_convbert": ["ConvBertConfig", "ConvBertOnnxConfig"],
"tokenization_convbert": ["ConvBertTokenizer"],
}
@ -42,7 +42,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_convbert"] = [
"CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"ConvBertForMaskedLM",
"ConvBertForMultipleChoice",
"ConvBertForQuestionAnswering",
@ -62,7 +61,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_convbert"] = [
"TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFConvBertForMaskedLM",
"TFConvBertForMultipleChoice",
"TFConvBertForQuestionAnswering",
@ -75,7 +73,7 @@ else:
if TYPE_CHECKING:
from .configuration_convbert import CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, ConvBertConfig, ConvBertOnnxConfig
from .configuration_convbert import ConvBertConfig, ConvBertOnnxConfig
from .tokenization_convbert import ConvBertTokenizer
try:
@ -93,7 +91,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_convbert import (
CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
ConvBertForMaskedLM,
ConvBertForMultipleChoice,
ConvBertForQuestionAnswering,
@ -112,7 +109,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_tf_convbert import (
TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFConvBertForMaskedLM,
TFConvBertForMultipleChoice,
TFConvBertForQuestionAnswering,


@ -25,9 +25,6 @@ from ...utils import logging
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ConvBertConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ConvBertModel`]. It is used to instantiate an


@ -46,9 +46,6 @@ _CHECKPOINT_FOR_DOC = "YituTech/conv-bert-base"
_CONFIG_FOR_DOC = "ConvBertConfig"
from ..deprecated._archive_maps import CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
def load_tf_weights_in_convbert(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:


@ -61,9 +61,6 @@ _CHECKPOINT_FOR_DOC = "YituTech/conv-bert-base"
_CONFIG_FOR_DOC = "ConvBertConfig"
from ..deprecated._archive_maps import TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST # noqa: F401, E402
# Copied from transformers.models.albert.modeling_tf_albert.TFAlbertEmbeddings with Albert->ConvBert
class TFConvBertEmbeddings(keras.layers.Layer):
"""Construct the embeddings from word, position and token_type embeddings."""


@ -22,9 +22,7 @@ from ...utils import (
)
_import_structure = {
"configuration_convnext": ["CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvNextConfig", "ConvNextOnnxConfig"]
}
_import_structure = {"configuration_convnext": ["ConvNextConfig", "ConvNextOnnxConfig"]}
try:
if not is_vision_available():
@ -42,7 +40,6 @@ except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_convnext"] = [
"CONVNEXT_PRETRAINED_MODEL_ARCHIVE_LIST",
"ConvNextForImageClassification",
"ConvNextModel",
"ConvNextPreTrainedModel",
@ -62,7 +59,7 @@ else:
]
if TYPE_CHECKING:
from .configuration_convnext import CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP, ConvNextConfig, ConvNextOnnxConfig
from .configuration_convnext import ConvNextConfig, ConvNextOnnxConfig
try:
if not is_vision_available():
@ -80,7 +77,6 @@ if TYPE_CHECKING:
pass
else:
from .modeling_convnext import (
CONVNEXT_PRETRAINED_MODEL_ARCHIVE_LIST,
ConvNextBackbone,
ConvNextForImageClassification,
ConvNextModel,


@ -28,9 +28,6 @@ from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_feat
logger = logging.get_logger(__name__)
from ..deprecated._archive_maps import CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP # noqa: F401, E402
class ConvNextConfig(BackboneConfigMixin, PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`ConvNextModel`]. It is used to instantiate an

Some files were not shown because too many files have changed in this diff.