diff --git a/pytorch_transformers/tests/modeling_common_test.py b/pytorch_transformers/tests/modeling_common_test.py
index ff321193a5..5e30cd1e32 100644
--- a/pytorch_transformers/tests/modeling_common_test.py
+++ b/pytorch_transformers/tests/modeling_common_test.py
@@ -68,6 +68,16 @@ class CommonTestCases:
                     self.assertIn(param.data.mean().item(), [0.0, 1.0],
                         msg="Parameter {} of model {} seems not properly initialized".format(name, model_class))
 
+        def test_determinism(self):
+            config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+            for model_class in self.all_model_classes:
+                model = model_class(config)
+                model.eval()
+                first, second = model(inputs_dict["input_ids"])[0], model(inputs_dict["input_ids"])[0]
+                self.assertEqual(first.ne(second).sum().item(), 0)
+
+
         def test_attention_outputs(self):
             config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
 
diff --git a/pytorch_transformers/tests/modeling_tf_common_test.py b/pytorch_transformers/tests/modeling_tf_common_test.py
index ecd1e387f9..5e7d29cb7f 100644
--- a/pytorch_transformers/tests/modeling_tf_common_test.py
+++ b/pytorch_transformers/tests/modeling_tf_common_test.py
@@ -298,6 +298,14 @@ class TFCommonTestCases:
         #     self.assertGreater(len(params_not_tied), len(params_tied))
         #     self.assertEqual(len(params_tied_2), len(params_tied))
 
+        def test_determinism(self):
+            config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+            for model_class in self.all_model_classes:
+                model = model_class(config)
+                first, second = model(inputs_dict, training=False)[0], model(inputs_dict, training=False)[0]
+                self.assertTrue(tf.math.equal(first, second).numpy().all())
+
 
 def ids_tensor(shape, vocab_size, rng=None, name=None, dtype=None):
     """Creates a random int32 tensor of the shape within the vocab size."""
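For context, both new tests follow the same pattern: run the same inputs through a freshly constructed model twice with dropout disabled (model.eval() on the PyTorch side, training=False on the TensorFlow side) and assert the two outputs are element-wise identical. Below is a minimal, self-contained sketch of that pattern in PyTorch; the toy model and input shapes are illustrative only and are not taken from the library.

# Sketch of the determinism check, assuming an arbitrary toy model.
import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(16, 32), nn.Dropout(0.1), nn.Linear(32, 4))
model.eval()  # disables dropout, so repeated forward passes are deterministic

inputs = torch.randn(2, 16)
with torch.no_grad():
    first = model(inputs)
    second = model(inputs)

# Same assertion as in the new PyTorch test: no element may differ.
assert first.ne(second).sum().item() == 0

Assuming the repository's test suite is run with pytest, the new tests can be selected with something like `python -m pytest pytorch_transformers/tests -k test_determinism`.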