some compile-related updates
ghstack-source-id: 63af8025c184fd5ad34f2f57bf78a37dda2cd33d
Pull Request resolved: #443
tianyu-l committed Aug 3, 2024
1 parent 72a1614 commit de9fd2b
Showing 2 changed files with 19 additions and 5 deletions.
11 changes: 11 additions & 0 deletions test_runner.py
@@ -168,6 +168,17 @@ def build_test_list():
"1D compile",
"1d_compile",
),
OverrideDefinitions(
[
[
"--training.compile",
"--activation_checkpoint.mode selective",
"--activation_checkpoint.selective_ac_option op",
],
],
"1D compile with selective op AC",
"1d_compile_sac_op",
),
OverrideDefinitions(
[
[
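For context on what the new test entry exercises: "selective op AC" recomputes most activations in backward but keeps the outputs of a chosen set of ops in memory. Below is a minimal sketch of that idea using PyTorch's selective-checkpoint API (available in recent PyTorch releases; this is not the torchtitan implementation, and the save-list and helper names are illustrative assumptions):

import functools

import torch
from torch.utils.checkpoint import (
    CheckpointPolicy,
    checkpoint,
    create_selective_checkpoint_contexts,
)

# Save matmul outputs (an illustrative choice) and recompute everything
# else in backward, trading recomputation for memory.
_ops_to_save = [torch.ops.aten.mm.default]

def _policy(ctx, op, *args, **kwargs):
    if op in _ops_to_save:
        return CheckpointPolicy.MUST_SAVE
    return CheckpointPolicy.PREFER_RECOMPUTE

def forward_with_selective_ac(fn, *args):
    # context_fn tells checkpoint which op outputs to keep
    context_fn = functools.partial(create_selective_checkpoint_contexts, _policy)
    return checkpoint(fn, *args, use_reentrant=False, context_fn=context_fn)

Calling forward_with_selective_ac(block.forward, x) would run a block under this policy; in torchtitan the policy is selected through the --activation_checkpoint.selective_ac_option op flag shown in the diff above.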
13 changes: 8 additions & 5 deletions torchtitan/parallelisms/parallelize_llama.py
@@ -441,12 +441,15 @@ def apply_ac(model: nn.Module, ac_config: JobConfig):

 def apply_compile(model: nn.Module):
     """Apply torch.compile to each transformer block."""
 
+    # the following flag can be used to accelerate per-block compilation
+    # TODO(bdhirsh): turning it off because it's currently not working with 2D
+    # TODO(anijain): remove it after it's enabled in pytorch by default
+    # torch._dynamo.config.inline_inbuilt_nn_modules = True
+
     for layer_id, transformer_block in model.layers.named_children():
-        # TODO: dynamic shapes have some issues, so we turn them off for now.
-        # TODO: inline inbuilt nn modules does not work yet; enable it to
-        # accelerate compile time.
-        # torch._dynamo.config.inline_inbuilt_nn_modules = True
-        transformer_block = torch.compile(transformer_block, dynamic=False)
+        # turn on per-transformer block compile after AC wrapping and before FSDP
+        transformer_block = torch.compile(transformer_block, fullgraph=True)
         model.layers.register_module(layer_id, transformer_block)
 
     logger.info("Compiled each TransformerBlock with torch.compile")
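The loop above compiles each TransformerBlock individually (after AC wrapping and before FSDP, per the comment) rather than wrapping the whole model in a single torch.compile call, and fullgraph=True asserts that each block traces into one graph with no graph breaks. A self-contained toy of the same per-block pattern (module names and sizes here are made up for illustration):

import torch
import torch.nn as nn

class Block(nn.Module):
    def __init__(self, dim: int = 64):
        super().__init__()
        self.ff = nn.Sequential(nn.Linear(dim, dim), nn.ReLU(), nn.Linear(dim, dim))

    def forward(self, x):
        return x + self.ff(x)

class ToyModel(nn.Module):
    def __init__(self, n_layers: int = 4, dim: int = 64):
        super().__init__()
        self.layers = nn.ModuleDict({str(i): Block(dim) for i in range(n_layers)})

    def forward(self, x):
        for block in self.layers.values():
            x = block(x)
        return x

model = ToyModel()
for layer_id, block in model.layers.named_children():
    # compile each block as its own unit; fullgraph=True fails loudly on graph breaks
    model.layers.register_module(layer_id, torch.compile(block, fullgraph=True))

out = model(torch.randn(8, 64))  # first call triggers per-block compilation

Keeping each block a separate compile unit bounds compile time per graph and lets the compiled regions compose with wrappers (such as AC) applied to the blocks beforehand.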
