add knobs for rope/swiglu fusion (#8184)
Signed-off-by: Hongbin Liu <[email protected]>
Co-authored-by: Hongbin Liu <[email protected]>
Signed-off-by: Pablo Garay <[email protected]>
2 people authored and pablo-garay committed Mar 19, 2024
1 parent 56e54bf commit 33095bb
Showing 1 changed file with 4 additions and 2 deletions.
@@ -445,10 +445,11 @@ def build_transformer_config(self) -> TransformerConfig:
 attention_softmax_in_fp32 = True

 bias_activation_fusion = self.cfg.get('bias_activation_fusion', True)
-bias_gelu_fusion = True if bias_activation_fusion else False

 bias_dropout_fusion = self.cfg.get('bias_dropout_add_fusion', True)

+apply_rope_fusion = self.cfg.get('apply_rope_fusion', True)
+
 # TODO: need to check if recompute APIs are matching up properly
 recompute_granularity = self.cfg.get('activations_checkpoint_granularity', None)
 recompute_method = self.cfg.get('activations_checkpoint_method', None)
@@ -466,8 +467,9 @@ def build_transformer_config(self) -> TransformerConfig:
 'init_method': init_method,
 'output_layer_init_method': output_layer_init_method,
 'attention_softmax_in_fp32': attention_softmax_in_fp32,
-'bias_gelu_fusion': bias_gelu_fusion,
+'bias_activation_fusion': bias_activation_fusion,
 'bias_dropout_fusion': bias_dropout_fusion,
+'apply_rope_fusion': apply_rope_fusion,
 'recompute_granularity': recompute_granularity,
 'recompute_method': recompute_method,
 'recompute_num_layers': recompute_num_layers,
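A minimal sketch (not part of the commit) of how these knobs resolve from a NeMo-style OmegaConf model config. The dict contents below are hypothetical; the get(...) keys and defaults mirror the diff above.

from omegaconf import OmegaConf

# Hypothetical model-config fragment; any knob left out falls back to the
# default passed to cfg.get(...), i.e. fusion enabled.
cfg = OmegaConf.create(
    {
        'bias_activation_fusion': True,  # bias + activation (e.g. SwiGLU) fusion knob
        'apply_rope_fusion': False,      # turn the fused RoPE kernel off
    }
)

bias_activation_fusion = cfg.get('bias_activation_fusion', True)  # -> True
bias_dropout_fusion = cfg.get('bias_dropout_add_fusion', True)    # key absent -> True
apply_rope_fusion = cfg.get('apply_rope_fusion', True)            # -> False

The resolved values are then passed straight through as TransformerConfig kwargs, as shown in the second hunk of the diff.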
