
Commit

Fix comment formatting
raviskolli committed Apr 14, 2021
1 parent 2a81d54 commit b947293
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions src/transformers/models/t5/modeling_t5.py
@@ -645,7 +645,7 @@ def forward(
 
         # clamp inf values to enable fp16 training
        if self.ort:
-            #Remove data-based control flow for static graph
+            # Remove data-based control flow for static graph
            if hidden_states.dtype == torch.float16:
                clamp_value = torch.where(torch.isinf(hidden_states).any(), torch.finfo(hidden_states.dtype).max - 1000,
                                          torch.finfo(hidden_states.dtype).max)
@@ -679,7 +679,7 @@ def forward(
 
         # clamp inf values to enable fp16 training
        if self.ort:
-            #Remove data-based control flow for static graph
+            # Remove data-based control flow for static graph
            if hidden_states.dtype == torch.float16:
                clamp_value = torch.where(torch.isinf(hidden_states).any(), torch.finfo(hidden_states.dtype).max - 1000,
                                          torch.finfo(hidden_states.dtype).max)
@@ -701,7 +701,7 @@ def forward(
 
         # clamp inf values to enable fp16 training
        if self.ort:
-            #Remove data-based control flow for static graph
+            # Remove data-based control flow for static graph
            if hidden_states.dtype == torch.float16:
                clamp_value = torch.where(torch.isinf(hidden_states).any(), torch.finfo(hidden_states.dtype).max - 1000,
                                          torch.finfo(hidden_states.dtype).max)
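All three hunks touch the same fp16 clamping pattern on the ORT (ONNX Runtime) training path: instead of branching in Python on torch.isinf(hidden_states).any(), which would bake data-dependent control flow into the exported static graph, the clamp threshold is selected with torch.where inside the graph. The sketch below shows that pattern in isolation; the helper function, its name, and the trailing torch.clamp call are assumptions for illustration, since they fall outside the context lines shown in the diff.

import torch

def clamp_fp16_hidden_states(hidden_states: torch.Tensor) -> torch.Tensor:
    # Hypothetical helper sketching the pattern from the hunks above; the
    # torch.clamp call and this signature are assumptions, not part of the diff.
    if hidden_states.dtype == torch.float16:
        max_value = torch.finfo(hidden_states.dtype).max
        # Keep the inf check inside the graph: torch.where picks the clamp
        # threshold as a tensor op, with no Python-level data-dependent branch.
        clamp_value = torch.where(
            torch.isinf(hidden_states).any(),
            torch.tensor(max_value - 1000, dtype=hidden_states.dtype),
            torch.tensor(max_value, dtype=hidden_states.dtype),
        )
        hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)
    return hidden_states

Because the branch is expressed as a tensor operation, the traced or exported graph has the same structure regardless of the input values, which is what the "static graph" comment refers to.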

