Commit 712b37a: better error messages
Signed-off-by: Lucas Wilkinson <[email protected]>
LucasWilkinson committed Jan 24, 2025
1 parent 501000d commit 712b37a
Showing 1 changed file with 4 additions and 5 deletions.
9 changes: 4 additions & 5 deletions tests/kernels/test_flash_attn.py
@@ -98,8 +98,8 @@ def test_flash_attn_with_paged_kv(
 ) -> None:
     torch.set_default_device("cuda")
     if not is_fa_version_supported(fa_version):
-        pytest.skip("Flash attention version not supported due to: " + \
-                    fa_version_unsupported_reason(fa_version))
+        pytest.skip(f"Flash attention version {fa_version} not supported due "
+                    f"to: \"{fa_version_unsupported_reason(fa_version)}\"")
 
     current_platform.seed_everything(0)
     num_seqs = len(kv_lens)
@@ -184,9 +184,8 @@ def test_varlen_with_paged_kv(
 ) -> None:
     torch.set_default_device("cuda")
     if not is_fa_version_supported(fa_version):
-        pytest.skip("Flash attention version not supported due to: " + \
-                    fa_version_unsupported_reason(fa_version))
-
+        pytest.skip(f"Flash attention version {fa_version} not supported due "
+                    f"to: \"{fa_version_unsupported_reason(fa_version)}\"")
     current_platform.seed_everything(0)
     num_seqs = len(seq_lens)
     query_lens = [x[0] for x in seq_lens]
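For illustration only (not part of the commit): a minimal sketch of the message the new f-string skip call would produce, assuming a hypothetical reason string in place of whatever fa_version_unsupported_reason(fa_version) actually returns.

# Minimal sketch, not from the commit: renders the new skip message with a
# hypothetical reason standing in for fa_version_unsupported_reason(fa_version).
fa_version = 3  # hypothetical example value
reason = "unsupported compute capability"  # hypothetical example value
message = (f"Flash attention version {fa_version} not supported due "
           f"to: \"{reason}\"")
print(message)
# Prints: Flash attention version 3 not supported due to: "unsupported compute capability"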
