# Reload a config and tokenizer that were previously persisted with
# `save_pretrained('./test/saved_model/')`.
config = torch.hub.load('huggingface/transformers', 'config', './test/bert_saved_model/')
tokenizer = torch.hub.load('huggingface/transformers', 'tokenizer', './test/bert_saved_model/')
# Causal language modeling examples.
# 1) Download model weights and configuration from huggingface.co (cached locally).
model = torch.hub.load('huggingface/transformers', 'modelForCausalLM', 'gpt2')
# 2) Reload a model persisted with `save_pretrained('./test/saved_model/')`.
model = torch.hub.load('huggingface/transformers', 'modelForCausalLM', './test/saved_model/')
# 3) Override a configuration option at load time.
model = torch.hub.load('huggingface/transformers', 'modelForCausalLM', 'gpt2', output_attentions=True)
# Masked language modeling examples.
# 1) Download model weights and configuration from huggingface.co (cached locally).
model = torch.hub.load('huggingface/transformers', 'modelForMaskedLM', 'bert-base-uncased')
# 2) Reload a model persisted with `save_pretrained('./test/saved_model/')`.
model = torch.hub.load('huggingface/transformers', 'modelForMaskedLM', './test/bert_model/')
# 3) Override a configuration option at load time.
model = torch.hub.load('huggingface/transformers', 'modelForMaskedLM', 'bert-base-uncased', output_attentions=True)
# Fixed: original read `assertmodel...` (missing space after `assert`), which is a
# syntax error. Also use the idiomatic truthiness assert instead of `== True`.
assert model.config.output_attentions
# NOTE(review): the comment below mentions loading from a TF checkpoint, but no
# TF-checkpoint example follows it here — the accompanying snippet appears to be
# missing from this excerpt; confirm against the original documentation.
# Loading from a TF checkpoint file instead of a PyTorch model (slower)
#
# Sequence classification examples.
# 1) Download model weights and configuration from huggingface.co (cached locally).
model = torch.hub.load('huggingface/transformers', 'modelForSequenceClassification', 'bert-base-uncased')
# 2) Reload a model persisted with `save_pretrained('./test/saved_model/')`.
model = torch.hub.load('huggingface/transformers', 'modelForSequenceClassification', './test/bert_model/')
# 3) Override a configuration option at load time.
model = torch.hub.load('huggingface/transformers', 'modelForSequenceClassification', 'bert-base-uncased', output_attentions=True)
# Question answering examples.
# 1) Download model weights and configuration from huggingface.co (cached locally).
model = torch.hub.load('huggingface/transformers', 'modelForQuestionAnswering', 'bert-base-uncased')
# 2) Reload a model persisted with `save_pretrained('./test/saved_model/')`.
model = torch.hub.load('huggingface/transformers', 'modelForQuestionAnswering', './test/bert_model/')
# 3) Override a configuration option at load time.
model = torch.hub.load('huggingface/transformers', 'modelForQuestionAnswering', 'bert-base-uncased', output_attentions=True)