Merge pull request #609 from FZJ-INM1-BDA/fix_bbox_planar_intersection
fix bbox planar intersection
AhmetNSimsek authored Oct 26, 2024
2 parents 7e3c69c + e068672 commit 0662f69
Showing 4 changed files with 58 additions and 31 deletions.
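
The core change: BoundingBox._intersect_bbox previously discarded any intersection whose volume was zero, so a volumetric box and a planar one (for example the bounding box of a single histological section) were reported as non-intersecting. The patched logic keeps planar intersections and only gives up when the overlap degenerates further. Below is a minimal standalone sketch of that per-axis logic, using plain tuples instead of siibra's Point and BoundingBox classes; the disjointness check and the return types are simplifications of the actual method, not a copy of it.

from typing import Tuple

Vec = Tuple[float, float, float]

def intersect_boxes(min1: Vec, max1: Vec, min2: Vec, max2: Vec):
    """Axis-aligned box intersection that keeps planar (zero-volume) overlaps."""
    minpt, maxpt = [], []
    for dim in range(3):
        lo, hi = max(min1[dim], min2[dim]), min(max1[dim], max2[dim])
        if lo > hi:
            return None                # disjoint along this axis
        minpt.append(lo)
        maxpt.append(hi)
    if minpt == maxpt:
        return tuple(minpt)            # overlap collapses to a single point
    flat_dims = sum(lo == hi for lo, hi in zip(minpt, maxpt))
    if flat_dims == 2:
        return None                    # overlap is only a line
    return tuple(minpt), tuple(maxpt)  # zero-volume (planar) boxes are now kept

# A volumetric box and a coronal plane at y = 2.0 now yield a planar box instead of None:
box_min, box_max = (0.0, 0.0, 0.0), (10.0, 10.0, 10.0)
plane_min, plane_max = (0.0, 2.0, 0.0), (10.0, 2.0, 10.0)
print(intersect_boxes(box_min, box_max, plane_min, plane_max))
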
50 changes: 30 additions & 20 deletions e2e/features/image/test_image.py
@@ -3,38 +3,48 @@
from siibra.features.image.image import Image
import time

PRERELEASE_FEATURES_W_NO_DATASET = [
"The Enriched Connectome - Block face images of full sagittal human brain sections (blockface)",
"The Enriched Connectome - 3D polarized light imaging connectivity data of full sagittal human brain sections (HSV fibre orientation map)",
]
all_image_features = [f for ft in siibra.features.Feature._SUBCLASSES[siibra.features.image.image.Image] for f in ft._get_instances()]


@pytest.mark.parametrize("feature", all_image_features)
def test_feature_has_datasets(feature: Image):
if feature.name in PRERELEASE_FEATURES_W_NO_DATASET:
if len(feature.datasets) > 0:
pytest.fail(f"Feature '{feature}' was listed as a prerelease previously but now has dataset information. Please update `PRERELEASE_FEATURES_W_NO_DATASET`")
pytest.skip(f"Feature '{feature}' has no datasets yet as it is prerelease data.")
assert len(feature.datasets) > 0, f"{feature} has no datasets"


def test_images_datasets_names():
start = time.time()
all_ds_names = {ds.name for f in all_image_features for ds in f.datasets}
end = time.time()
duration = end - start
assert len(all_ds_names) == 10, "expected 10 distinct names" # this must be updated if new datasets are added
assert duration < 1, "Expected getting dataset names to be less than 1s"


# Update this as new configs are added
results = [
query_and_results = [
(siibra.features.get(siibra.get_template("big brain"), "CellbodyStainedSection"), 145),
(siibra.features.get(siibra.get_template("big brain"), "CellBodyStainedVolumeOfInterest"), 2),
(siibra.features.get(siibra.get_template("mni152"), "image", restrict_space=True), 4),
(siibra.features.get(siibra.get_template("mni152"), "image", restrict_space=False), 13),
(siibra.features.get(siibra.get_template("mni152"), "image", restrict_space=False), 13), # TODO: should this query find all the images, or is it okay if bigbrain sections fail?
(siibra.features.get(siibra.get_region('julich 3.1', 'hoc1 left'), "CellbodyStainedSection"), 45),
(siibra.features.get(siibra.get_region('julich 2.9', 'hoc1 left'), "CellbodyStainedSection"), 41)
]
features = [f for fts, _ in results for f in fts]


@pytest.mark.parametrize("feature", features)
def test_feature_has_datasets(feature: Image):
assert len(feature.datasets) > 0


@pytest.mark.parametrize("features, result_len", results)
@pytest.mark.parametrize("query_results, result_len", query_and_results)
def test_image_query_results(
features: Image,
query_results: Image,
result_len: int
):
assert len(features) == result_len


def test_images_datasets_names():
start = time.time()
all_ds_names = {ds.name for f in features for ds in f.datasets}
end = time.time()
duration = start - end
assert len(all_ds_names) == 9, "expected 9 distinct names"
assert duration < 1, "Expected getting dataset names to be less than 1s"
assert len(query_results) == result_len


def test_color_channel_fetching():
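
The reworked test module also introduces an allow-list for prerelease image features that ship without dataset metadata: such features are skipped, but the test fails loudly once a listed feature gains datasets, so the list cannot go stale. A generic pytest sketch of that pattern; the names and data below are illustrative, not siibra's.

import pytest

KNOWN_INCOMPLETE = {"prerelease-item"}            # illustrative allow-list
METADATA = {"published-item": ["dataset-A"]}      # illustrative metadata lookup

@pytest.mark.parametrize("item", ["prerelease-item", "published-item"])
def test_metadata_present(item):
    datasets = METADATA.get(item, [])
    if item in KNOWN_INCOMPLETE:
        if datasets:
            pytest.fail(f"'{item}' is allow-listed but now has datasets; update KNOWN_INCOMPLETE")
        pytest.skip(f"'{item}' is prerelease and has no dataset metadata yet")
    assert datasets, f"{item} has no datasets"
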
19 changes: 13 additions & 6 deletions siibra/locations/boundingbox.py
@@ -115,12 +115,12 @@ def is_planar(self) -> bool:
def __str__(self):
if self.space is None:
return (
f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)}) mm "
f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)}) mm"
f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)})mm "
f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)})mm"
)
else:
return (
f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)}) mm "
f"Bounding box from ({','.join(f'{v:.2f}' for v in self.minpoint)})mm "
f"to ({','.join(f'{v:.2f}' for v in self.maxpoint)})mm in {self.space.name} space"
)

@@ -188,12 +188,18 @@ def _intersect_bbox(self, other: 'BoundingBox', dims=[0, 1, 2]):
result_minpt.append(A[dim])
result_maxpt.append(B[dim])

if result_minpt == result_maxpt:
return result_minpt

bbox = BoundingBox(
point1=point.Point(result_minpt, self.space),
point2=point.Point(result_maxpt, self.space),
space=self.space,
)
return bbox if bbox.volume > 0 else None

if bbox.volume == 0 and sum(cmin == cmax for cmin, cmax in zip(result_minpt, result_maxpt)) == 2:
return None
return bbox

def _intersect_mask(self, mask: 'Nifti1Image', threshold=0):
"""Intersect this bounding box with an image mask. Returns None if they do not intersect.
@@ -291,9 +297,10 @@ def warp(self, space):
return self
else:
try:
return self.corners.warp(spaceobj).boundingbox
except ValueError:
warped_corners = self.corners.warp(spaceobj)
except SpaceWarpingFailedError:
raise SpaceWarpingFailedError(f"Warping {str(self)} to {spaceobj.name} not successful.")
return warped_corners.boundingbox

def transform(self, affine: np.ndarray, space=None):
"""Returns a new bounding box obtained by transforming the
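
With these changes, BoundingBox.warp signals failure by raising siibra's SpaceWarpingFailedError, re-raised with context, rather than a bare ValueError, so callers should catch the dedicated exception instead of special-casing None. A hedged usage sketch: the constructor arguments mirror the diff, while the space lookup via siibra.spaces and the string space spec passed to warp are assumptions about the public API.

import siibra
from siibra.exceptions import SpaceWarpingFailedError
from siibra.locations import point
from siibra.locations.boundingbox import BoundingBox

mni = siibra.spaces["mni152"]                      # assumed registry lookup
bbox = BoundingBox(
    point1=point.Point((-10.0, -10.0, -10.0), mni),
    point2=point.Point((10.0, 10.0, 10.0), mni),
    space=mni,
)
try:
    warped = bbox.warp("bigbrain")
except SpaceWarpingFailedError as exc:
    warped = None                                  # warping is not defined for every space pair
    print(f"Warping failed: {exc}")
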
13 changes: 9 additions & 4 deletions siibra/locations/point.py
@@ -18,6 +18,7 @@

from ..commons import logger
from ..retrieval.requests import HttpRequest
from ..exceptions import SpaceWarpingFailedError

from urllib.parse import quote
import re
@@ -55,10 +56,14 @@ def parse(spec, unit="mm") -> Tuple[float, float, float]:
if len(digits) == 3:
return tuple(float(d) for d in digits)
elif isinstance(spec, (tuple, list)) and len(spec) in [3, 4]:
if any(v is None for v in spec):
raise RuntimeError("Cannot parse cooridantes containing None values.")
if len(spec) == 4:
assert spec[3] == 1
return tuple(float(v.item()) if isinstance(v, np.ndarray) else float(v) for v in spec[:3])
elif isinstance(spec, np.ndarray) and spec.size == 3:
if any(np.isnan(v) for v in spec):
raise RuntimeError("Cannot parse cooridantes containing NaN values.")
return tuple(float(v.item()) if isinstance(v, np.ndarray) else float(v) for v in spec[:3])
elif isinstance(spec, Point):
return spec.coordinate
@@ -125,7 +130,7 @@ def warp(self, space):
if spaceobj == self.space:
return self
if any(_ not in location.Location.SPACEWARP_IDS for _ in [self.space.id, spaceobj.id]):
raise ValueError(
raise SpaceWarpingFailedError(
f"Cannot convert coordinates between {self.space.id} and {spaceobj.id}"
)
url = "{server}/transform-point?source_space={src}&target_space={tgt}&x={x}&y={y}&z={z}".format(
@@ -137,9 +142,9 @@
z=self.coordinate[2],
)
response = HttpRequest(url, lambda b: json.loads(b.decode())).get()
if any(map(np.isnan, response['target_point'])):
logger.info(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')
return None
if np.any(np.isnan(response['target_point'])):
raise SpaceWarpingFailedError(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')

return self.__class__(
coordinatespec=tuple(response["target_point"]),
space=spaceobj.id,
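
Point parsing and warping are stricter now: parse rejects coordinate specs containing None or NaN, and warp raises SpaceWarpingFailedError when the warping service returns NaN instead of silently handing back None. A small sketch of the parse behaviour, assuming parse is exposed as a static method of Point, as its self-less signature suggests.

import numpy as np
from siibra.locations.point import Point

print(Point.parse((1.0, 2.0, 3.0)))                  # -> (1.0, 2.0, 3.0)

for bad in [(1.0, None, 3.0), np.array([1.0, np.nan, 3.0])]:
    try:
        Point.parse(bad)
    except RuntimeError as exc:                      # raised by the new validation
        print(f"rejected {bad!r}: {exc}")
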
7 changes: 6 additions & 1 deletion siibra/locations/pointset.py
@@ -18,6 +18,7 @@

from ..retrieval.requests import HttpRequest
from ..commons import logger
from ..exceptions import SpaceWarpingFailedError

from typing import List, Union, Tuple
import numbers
@@ -149,7 +150,7 @@ def warp(self, space, chunksize=1000):
if spaceobj == self.space:
return self
if any(_ not in location.Location.SPACEWARP_IDS for _ in [self.space.id, spaceobj.id]):
raise ValueError(
raise SpaceWarpingFailedError(
f"Cannot convert coordinates between {self.space.id} and {spaceobj.id}"
)

@@ -178,6 +179,10 @@ def warp(self, space, chunksize=1000):
).data
tgt_points.extend(list(response["target_points"]))

# TODO: consider using np.isnan(np.dot(arr, arr)). see https://stackoverflow.com/a/45011547
if np.any(np.isnan(response['target_points'])):
raise SpaceWarpingFailedError(f'Warping {str(self)} to {spaceobj.name} resulted in NaN')

return self.__class__(coordinates=tuple(tgt_points), space=spaceobj, labels=self.labels)

def transform(self, affine: np.ndarray, space=None):
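
The TODO above refers to a common speed trick: for a float array, np.dot of the array with itself is NaN exactly when some element is NaN (the squares are all non-negative, so infinities cannot cancel into NaN), and the single BLAS call is usually faster than np.isnan(arr).any(). A small sketch, independent of siibra:

import numpy as np

def has_nan(arr) -> bool:
    flat = np.asarray(arr, dtype=float).ravel()
    return bool(np.isnan(np.dot(flat, flat)))   # NaN propagates through the dot product

points = np.array([[1.0, 2.0, 3.0], [4.0, np.nan, 6.0]])
print(has_nan(points))        # True
print(has_nan(points[:1]))    # False
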
