[CI/Build][CPU][Bugfix] Fix CPU CI (vllm-project#12150)
Signed-off-by: jiang1.li <[email protected]>
Signed-off-by: Isotr0py <[email protected]>
bigPYJ1151 authored and Isotr0py committed Feb 2, 2025
1 parent 68a7e95 commit 72c4266
Showing 2 changed files with 8 additions and 4 deletions.
4 changes: 2 additions & 2 deletions .buildkite/run-cpu-test.sh
```diff
@@ -83,6 +83,6 @@ function cpu_tests() {
 tests/lora/test_qwen2vl.py"
 }
 
-# All of CPU tests are expected to be finished less than 25 mins.
+# All of CPU tests are expected to be finished less than 40 mins.
 export -f cpu_tests
-timeout 30m bash -c "cpu_tests $CORE_RANGE $NUMA_NODE"
+timeout 40m bash -c "cpu_tests $CORE_RANGE $NUMA_NODE"
```
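Note on the CI guard above: `export -f cpu_tests` is what makes the shell function visible inside the `bash -c` subshell that `timeout` launches, and the 40-minute budget in the comment now matches the enforced `timeout 40m` limit (previously the comment said 25 mins while the guard was 30m).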
8 changes: 6 additions & 2 deletions vllm/model_executor/layers/activation.py
```diff
@@ -30,8 +30,10 @@ class FatreluAndMul(CustomOp):
     def __init__(self, threshold: float = 0.):
         super().__init__()
         self.threshold = threshold
-        if current_platform.is_cuda_alike() or current_platform.is_cpu():
+        if current_platform.is_cuda_alike():
             self.op = torch.ops._C.fatrelu_and_mul
+        elif current_platform.is_cpu():
+            self._forward_method = self.forward_native
 
     def forward_native(self, x: torch.Tensor) -> torch.Tensor:
         d = x.shape[-1] // 2
```
```diff
@@ -100,11 +102,13 @@ class MulAndSilu(CustomOp):
 
     def __init__(self):
         super().__init__()
-        if current_platform.is_cuda_alike() or current_platform.is_cpu():
+        if current_platform.is_cuda_alike():
             self.op = torch.ops._C.mul_and_silu
         elif current_platform.is_xpu():
             from vllm._ipex_ops import ipex_ops
             self.op = ipex_ops.silu_and_mul
+        elif current_platform.is_cpu():
+            self._forward_method = self.forward_native
 
     def forward_native(self, x: torch.Tensor) -> torch.Tensor:
         """PyTorch-native implementation equivalent to forward()."""
```
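In the activation change, the CPU path no longer binds the `torch.ops._C` kernels and instead dispatches to the pure-PyTorch `forward_native` implementations. Below is a minimal, self-contained sketch of that dispatch pattern; it does not use vLLM's actual `CustomOp` machinery, the names `FatreluAndMulSketch`, `use_compiled_kernel`, and `_forward_compiled` are illustrative, and the native body follows the split/threshold/multiply form implied by the visible `d = x.shape[-1] // 2` line.

```python
# Minimal sketch (assumed names, not vLLM's CustomOp base class) of the
# CPU-fallback dispatch shown in the diff above.
import torch
import torch.nn as nn
import torch.nn.functional as F


class FatreluAndMulSketch(nn.Module):
    """Fused FATReLU-and-mul activation with a swappable forward path."""

    def __init__(self, threshold: float = 0., use_compiled_kernel: bool = False):
        super().__init__()
        self.threshold = threshold
        if use_compiled_kernel:
            # Stand-in for binding torch.ops._C.fatrelu_and_mul on CUDA-like
            # platforms; left unimplemented in this sketch.
            self._forward_method = self._forward_compiled
        else:
            # CPU path after the commit: dispatch to the pure-PyTorch version.
            self._forward_method = self.forward_native

    def forward_native(self, x: torch.Tensor) -> torch.Tensor:
        # Split the last dimension in half, apply FATReLU (values <= threshold
        # become 0) to the first half, and use it to gate the second half.
        d = x.shape[-1] // 2
        x1, x2 = x[..., :d], x[..., d:]
        return F.threshold(x1, self.threshold, 0.0) * x2

    def _forward_compiled(self, x: torch.Tensor) -> torch.Tensor:
        raise NotImplementedError("placeholder for the compiled kernel path")

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self._forward_method(x)


# CPU usage: an input of shape (2, 8) produces an output of shape (2, 4).
out = FatreluAndMulSketch(threshold=0.1)(torch.randn(2, 8))
```

Overriding `self._forward_method` in the constructor keeps `forward` itself untouched, so the same module transparently uses a compiled kernel where one exists and the PyTorch fallback elsewhere.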
