Skip to content

Commit

Permalink
skip unsupported tests
Browse files Browse the repository at this point in the history
Signed-off-by: Lucas Wilkinson <[email protected]>
  • Loading branch information
LucasWilkinson committed Jan 24, 2025
1 parent db2efe0 commit 501000d
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions tests/kernels/test_flash_attn.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@
import torch

from vllm.platforms import current_platform
from vllm.vllm_flash_attn import (flash_attn_varlen_func,
flash_attn_with_kvcache)
from vllm.vllm_flash_attn import (fa_version_unsupported_reason,
flash_attn_varlen_func,
flash_attn_with_kvcache,
is_fa_version_supported)

NUM_HEADS = [(4, 4), (8, 2), (16, 2)]
HEAD_SIZES = [128, 256]
Expand Down Expand Up @@ -95,10 +97,9 @@ def test_flash_attn_with_paged_kv(
fa_version: int,
) -> None:
torch.set_default_device("cuda")
if fa_version == 3 and (torch.cuda.get_device_capability() == (8, 6)
or torch.cuda.get_device_capability() == (8, 9)):
pytest.skip("Flash attention version 3 fails on 8.6 and 8.9 due to "
"insufficient shared memory for some shapes")
if not is_fa_version_supported(fa_version):
pytest.skip("Flash attention version not supported due to: " + \
fa_version_unsupported_reason(fa_version))

current_platform.seed_everything(0)
num_seqs = len(kv_lens)
Expand Down Expand Up @@ -182,10 +183,9 @@ def test_varlen_with_paged_kv(
fa_version: int,
) -> None:
torch.set_default_device("cuda")
if fa_version == 3 and (torch.cuda.get_device_capability() == (8, 6)
or torch.cuda.get_device_capability() == (8, 9)):
pytest.skip("Flash attention version 3 fails on 8.6 and 8.9 due to "
"insufficient shared memory for some shapes")
if not is_fa_version_supported(fa_version):
pytest.skip("Flash attention version not supported due to: " + \
fa_version_unsupported_reason(fa_version))

current_platform.seed_everything(0)
num_seqs = len(seq_lens)
Expand Down

0 comments on commit 501000d

Please sign in to comment.