[Fix doc example] fix missing import jnp (#15291)
* fix missing import jnp
* Fix missing jax and k=1

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent eac4aecc3d
commit c15bb3fe19
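The change is the same across every file below: the docstring examples call `jax.nn.softmax` and `jax.lax.top_k` (or build arrays with `jax.numpy`) without importing `jax`/`jnp`, and `jax.lax.top_k` has no default for `k`, so it must be called as `top_k(probs, k=1)`. A minimal sketch of the corrected mask-filling pattern follows, assuming the `facebook/bart-large` checkpoint from the hunk below; the example sentence and the `return_tensors="np"` choice are illustrative assumptions, not lines from this diff:

```python
# Minimal sketch of the corrected mask-filling example (assumptions noted above).
import jax
from transformers import BartTokenizer, FlaxBartForConditionalGeneration

tokenizer = BartTokenizer.from_pretrained("facebook/bart-large")
model = FlaxBartForConditionalGeneration.from_pretrained("facebook/bart-large")

text = "My friends are <mask> but they eat too many carbs."  # assumed example input
input_ids = tokenizer([text], return_tensors="np")["input_ids"]

logits = model(input_ids).logits
masked_index = (input_ids[0] == tokenizer.mask_token_id).nonzero()[0].item()
probs = jax.nn.softmax(logits[0, masked_index], axis=0)

# jax.lax.top_k requires k explicitly; this was the missing argument.
values, predictions = jax.lax.top_k(probs, k=1)
print(tokenizer.decode(predictions).split())
```
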
@@ -1085,6 +1085,7 @@ class FlaxBartPreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BartTokenizer, FlaxBartForConditionalGeneration

 >>> model = FlaxBartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")

@@ -1353,6 +1354,7 @@ class FlaxBartForConditionalGeneration(FlaxBartPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BartTokenizer, FlaxBartForConditionalGeneration

 >>> model = FlaxBartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")

@@ -1525,6 +1527,7 @@ FLAX_BART_CONDITIONAL_GENERATION_DOCSTRING = """
 Mask filling example:

 ```python
+>>> import jax
 >>> from transformers import BartTokenizer, FlaxBartForConditionalGeneration

 >>> model = FlaxBartForConditionalGeneration.from_pretrained("facebook/bart-large")

@@ -1536,7 +1539,7 @@ FLAX_BART_CONDITIONAL_GENERATION_DOCSTRING = """
 >>> logits = model(input_ids).logits
 >>> masked_index = (input_ids[0] == tokenizer.mask_token_id).nonzero()[0].item()
 >>> probs = jax.nn.softmax(logits[0, masked_index], axis=0)
->>> values, predictions = jax.lax.top_k(probs)
+>>> values, predictions = jax.lax.top_k(probs, k=1)

 >>> tokenizer.decode(predictions).split()
 ```

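The `import jax.numpy as jnp` additions matter because the elided part of each generation example constructs decoder inputs as `jnp` arrays. A hedged sketch of that usage with the `facebook/bart-large-cnn` checkpoint from the hunks above; the encode/decode calls and the decoder setup are illustrative assumptions, not lines from this diff:

```python
# Hedged sketch: why the generation examples need jax.numpy (jnp).
import jax.numpy as jnp
from transformers import BartTokenizer, FlaxBartForConditionalGeneration

tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
model = FlaxBartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")

inputs = tokenizer("My friends are cool but they eat too many carbs.", return_tensors="np")
encoder_outputs = model.encode(**inputs)

# The decoder is primed with decoder_start_token_id as a jnp array,
# which is what the previously missing import was needed for.
decoder_start = model.config.decoder_start_token_id
decoder_input_ids = jnp.ones((inputs["input_ids"].shape[0], 1), dtype="i4") * decoder_start

outputs = model.decode(decoder_input_ids, encoder_outputs)
logits = outputs.logits
```
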
@@ -1048,6 +1048,7 @@ class FlaxBlenderbotPreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BlenderbotTokenizer, FlaxBlenderbotForConditionalGeneration

 >>> model = FlaxBlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")

@@ -1317,6 +1318,7 @@ class FlaxBlenderbotForConditionalGeneration(FlaxBlenderbotPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BlenderbotTokenizer, FlaxBlenderbotForConditionalGeneration

 >>> model = FlaxBlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")

@@ -1060,6 +1060,7 @@ class FlaxBlenderbotSmallPreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BlenderbotSmallTokenizer, FlaxBlenderbotSmallForConditionalGeneration

 >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M")

@@ -1329,6 +1330,7 @@ class FlaxBlenderbotSmallForConditionalGeneration(FlaxBlenderbotSmallPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import BlenderbotSmallTokenizer, FlaxBlenderbotSmallForConditionalGeneration

 >>> model = FlaxBlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M")

@@ -1051,6 +1051,7 @@ class FlaxMarianPreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import MarianTokenizer, FlaxMarianMTModel

 >>> tokenizer = MarianTokenizer.from_pretrained("facebook/marian-large-cnn")

@@ -1319,6 +1320,7 @@ class FlaxMarianMTModel(FlaxMarianPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import MarianTokenizer, FlaxMarianMTModel

 >>> model = FlaxMarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-en-de")

@@ -1058,6 +1058,7 @@ class FlaxPegasusPreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import PegasusTokenizer, FlaxPegasusForConditionalGeneration

 >>> model = FlaxPegasusForConditionalGeneration.from_pretrained("google/pegasus-large")

@@ -1327,6 +1328,7 @@ class FlaxPegasusForConditionalGeneration(FlaxPegasusPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import PegasusTokenizer, FlaxPegasusForConditionalGeneration

 >>> model = FlaxPegasusForConditionalGeneration.from_pretrained("google/pegasus-large")

@@ -2188,6 +2188,7 @@ class Flax{{cookiecutter.camelcase_modelname}}PreTrainedModel(FlaxPreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import {{cookiecutter.camelcase_modelname}}Tokenizer, Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration

 >>> model = Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration.from_pretrained('{{cookiecutter.checkpoint_identifier}}')

@@ -2455,6 +2456,7 @@ class Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration(Flax{{cookiecutter.camelcase_modelname}}PreTrainedModel):
 Example:

 ```python
+>>> import jax.numpy as jnp
 >>> from transformers import {{cookiecutter.camelcase_modelname}}Tokenizer, Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration

 >>> model = Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration.from_pretrained('{{cookiecutter.checkpoint_identifier}}')

@@ -2627,6 +2629,7 @@ FLAX_{{cookiecutter.uppercase_modelname}}_CONDITIONAL_GENERATION_DOCSTRING = """
 Mask filling example:

 ```python
+>>> import jax
 >>> from transformers import {{cookiecutter.camelcase_modelname}}Tokenizer, Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration

 >>> model = Flax{{cookiecutter.camelcase_modelname}}ForConditionalGeneration.from_pretrained('{{cookiecutter.checkpoint_identifier}}')

@@ -2638,7 +2641,7 @@ FLAX_{{cookiecutter.uppercase_modelname}}_CONDITIONAL_GENERATION_DOCSTRING = """
 >>> logits = model(input_ids).logits
 >>> masked_index = (input_ids[0] == tokenizer.mask_token_id).nonzero().item()
 >>> probs = jax.nn.softmax(logits[0, masked_index], axis=0)
->>> values, predictions = jax.lax.top_k(probs)
+>>> values, predictions = jax.lax.top_k(probs, k=1)

 >>> tokenizer.decode(predictions).split()
 ```