Skip to content

Commit

Permalink
Test with the same gelu always
Browse files Browse the repository at this point in the history
  • Loading branch information
loadams committed Dec 17, 2024
1 parent db6851f commit 0bd98e4
Showing 1 changed file with 1 addition and 4 deletions.
5 changes: 1 addition & 4 deletions tests/unit/ops/transformer/inference/test_gelu.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,7 @@ def allclose(x, y):

def version_appropriate_gelu(activations):
    """Apply torch's built-in GELU to *activations*.

    Historically this helper branched on the torch version (gelu's behavior
    changed — correctly — in torch 1.12, gaining the ``approximate='tanh'``
    option), but the branch was removed so every environment tests the same
    gelu implementation.

    Args:
        activations: input tensor to pass through GELU.

    Returns:
        Tensor of the same shape with GELU applied elementwise
        (torch's default, exact erf-based formulation).
    """
    return torch.nn.functional.gelu(activations)

def run_gelu_reference(activations):
# Expected behavior is that of casting to float32 internally and using the tanh approximation
Expand Down

0 comments on commit 0bd98e4

Please sign in to comment.