diff --git a/tests/unit/ops/transformer/inference/test_gelu.py b/tests/unit/ops/transformer/inference/test_gelu.py
index 54f762c6b232..62338ff384f1 100644
--- a/tests/unit/ops/transformer/inference/test_gelu.py
+++ b/tests/unit/ops/transformer/inference/test_gelu.py
@@ -23,10 +23,7 @@ def allclose(x, y):
 
 def version_appropriate_gelu(activations):
     # gelu behavior changes (correctly) in torch 1.12
-    if required_torch_version(min_version=1.12):
-        return torch.nn.functional.gelu(activations, approximate='tanh')
-    else:
-        return torch.nn.functional.gelu(activations)
+    return torch.nn.functional.gelu(activations)
 
 def run_gelu_reference(activations):
     # Expected behavior is that of casting to float32 internally and using the tanh approximation
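
As context for the branch being removed, below is a minimal sketch (not part of the diff) of the two gelu variants the old version check distinguished: the exact erf-based gelu versus the tanh approximation that `approximate='tanh'` selects in torch >= 1.12, plus the float32-upcast reference path that the `run_gelu_reference` comment describes. The helper names `gelu_exact` and `gelu_tanh` are illustrative, not from the repo.

```python
import math

import torch


def gelu_exact(x: torch.Tensor) -> torch.Tensor:
    # Exact gelu: x * Phi(x), with Phi the standard normal CDF computed via erf.
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))


def gelu_tanh(x: torch.Tensor) -> torch.Tensor:
    # Tanh approximation, equivalent to torch.nn.functional.gelu(x, approximate='tanh').
    return 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x.pow(3))))


x = torch.randn(8, dtype=torch.float16)

# Reference path as described in run_gelu_reference: upcast to float32,
# apply the tanh approximation, then cast back to the input dtype.
ref = gelu_tanh(x.to(torch.float32)).to(x.dtype)
builtin = torch.nn.functional.gelu(x.to(torch.float32), approximate='tanh').to(x.dtype)
print(torch.allclose(ref, builtin))
```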