Skip to content

Commit

Permalink
Making sorting for K > 1 optional in the KNN points function
Browse files Browse the repository at this point in the history
Summary: Added a `sorted` argument to the `knn_points` function. This came up during benchmarking against Faiss — sorting added extra memory usage. Making sorting optional matches the memory usage of Faiss.

Reviewed By: bottler, gkioxari

Differential Revision: D22329070

fbshipit-source-id: 0828ff9b48eefce99ce1f60089389f6885d03139
  • Loading branch information
nikhilaravi authored and facebook-github-bot committed Jul 2, 2020
1 parent dd4a35c commit 806ca36
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 9 deletions.
19 changes: 14 additions & 5 deletions pytorch3d/ops/knn.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,9 @@ class _knn_points(Function):
"""

@staticmethod
def forward(ctx, p1, p2, lengths1, lengths2, K, version):
def forward(
ctx, p1, p2, lengths1, lengths2, K, version, return_sorted: bool = True
):
"""
K-Nearest neighbors on point clouds.
Expand All @@ -36,6 +38,8 @@ def forward(ctx, p1, p2, lengths1, lengths2, K, version):
K: Integer giving the number of nearest neighbors to return.
version: Which KNN implementation to use in the backend. If version=-1,
the correct implementation is selected based on the shapes of the inputs.
return_sorted: (bool) whether to return the nearest neighbors sorted in
ascending order of distance.
Returns:
p1_dists: Tensor of shape (N, P1, K) giving the squared distances to
Expand All @@ -52,7 +56,7 @@ def forward(ctx, p1, p2, lengths1, lengths2, K, version):
idx, dists = _C.knn_points_idx(p1, p2, lengths1, lengths2, K, version)

# sort KNN in ascending order if K > 1
if K > 1:
if K > 1 and return_sorted:
if lengths2.min() < K:
P1 = p1.shape[1]
mask = lengths2[:, None] <= torch.arange(K, device=dists.device)[None]
Expand Down Expand Up @@ -84,7 +88,7 @@ def backward(ctx, grad_dists, grad_idx):
grad_p1, grad_p2 = _C.knn_points_backward(
p1, p2, lengths1, lengths2, idx, grad_dists
)
return grad_p1, grad_p2, None, None, None, None
return grad_p1, grad_p2, None, None, None, None, None


def knn_points(
Expand All @@ -95,6 +99,7 @@ def knn_points(
K: int = 1,
version: int = -1,
return_nn: bool = False,
return_sorted: bool = True,
):
"""
K-Nearest neighbors on point clouds.
Expand All @@ -113,7 +118,9 @@ def knn_points(
K: Integer giving the number of nearest neighbors to return.
version: Which KNN implementation to use in the backend. If version=-1,
the correct implementation is selected based on the shapes of the inputs.
return_nn: If set to True returns the K nearest neighors in p2 for each point in p1.
return_nn: If set to True returns the K nearest neighbors in p2 for each point in p1.
return_sorted: (bool) whether to return the nearest neighbors sorted in
ascending order of distance.
Returns:
dists: Tensor of shape (N, P1, K) giving the squared distances to
Expand Down Expand Up @@ -158,7 +165,9 @@ def knn_points(
lengths2 = torch.full((p1.shape[0],), P2, dtype=torch.int64, device=p1.device)

# pyre-fixme[16]: `_knn_points` has no attribute `apply`.
p1_dists, p1_idx = _knn_points.apply(p1, p2, lengths1, lengths2, K, version)
p1_dists, p1_idx = _knn_points.apply(
p1, p2, lengths1, lengths2, K, version, return_sorted
)

p2_nn = None
if return_nn:
Expand Down
27 changes: 23 additions & 4 deletions tests/test_knn.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def _knn_points_naive(p1, p2, lengths1, lengths2, K: int) -> torch.Tensor:

return _KNN(dists=dists, idx=idx, knn=None)

def _knn_vs_python_square_helper(self, device):
def _knn_vs_python_square_helper(self, device, return_sorted):
Ns = [1, 4]
Ds = [3, 5, 8]
P1s = [8, 24]
Expand All @@ -70,7 +70,24 @@ def _knn_vs_python_square_helper(self, device):

# forward
out1 = self._knn_points_naive(x, y, lengths1=None, lengths2=None, K=K)
out2 = knn_points(x_cuda, y_cuda, K=K, version=version)
out2 = knn_points(
x_cuda, y_cuda, K=K, version=version, return_sorted=return_sorted
)
if K > 1 and not return_sorted:
# check out2 is not sorted
self.assertFalse(torch.allclose(out1[0], out2[0]))
self.assertFalse(torch.allclose(out1[1], out2[1]))
# now sort out2
dists, idx, _ = out2
if P2 < K:
dists[..., P2:] = float("inf")
dists, sort_idx = dists.sort(dim=2)
dists[..., P2:] = 0
else:
dists, sort_idx = dists.sort(dim=2)
idx = idx.gather(2, sort_idx)
out2 = _KNN(dists, idx, None)

self.assertClose(out1[0], out2[0])
self.assertTrue(torch.all(out1[1] == out2[1]))

Expand All @@ -86,11 +103,13 @@ def _knn_vs_python_square_helper(self, device):

def test_knn_vs_python_square_cpu(self):
device = torch.device("cpu")
self._knn_vs_python_square_helper(device)
self._knn_vs_python_square_helper(device, return_sorted=True)

def test_knn_vs_python_square_cuda(self):
device = get_random_cuda_device()
self._knn_vs_python_square_helper(device)
# Check both cases where the output is sorted and unsorted
self._knn_vs_python_square_helper(device, return_sorted=True)
self._knn_vs_python_square_helper(device, return_sorted=False)

def _knn_vs_python_ragged_helper(self, device):
Ns = [1, 4]
Expand Down

0 comments on commit 806ca36

Please sign in to comment.