Improve test protocol for inputs_embeds in TF

cc @lysandrejik
This commit is contained in:
Julien Chaumond 2019-11-26 14:37:32 -05:00
parent b632145273
commit cf62bdc962
1 changed file with 9 additions and 4 deletions

View File

@ -426,10 +426,15 @@ class TFCommonTestCases:
try:
x = wte([input_ids], mode="embedding")
except:
if hasattr(self.model_tester, "embedding_size"):
x = tf.ones(input_ids.shape + [model.config.embedding_size], dtype=tf.dtypes.float32)
else:
x = tf.ones(input_ids.shape + [self.model_tester.hidden_size], dtype=tf.dtypes.float32)
x = wte([input_ids, None, None, None], mode="embedding")
# ^^ In our TF models, the input_embeddings can take slightly different forms,
# so we try a few of them.
# We used to fall back to just synthetically creating a dummy tensor of ones:
#
# if hasattr(self.model_tester, "embedding_size"):
# x = tf.ones(input_ids.shape + [self.model_tester.embedding_size], dtype=tf.dtypes.float32)
# else:
# x = tf.ones(input_ids.shape + [self.model_tester.hidden_size], dtype=tf.dtypes.float32)
inputs_dict["inputs_embeds"] = x
outputs = model(inputs_dict)