[DAR-5370][External] Axially-agnostic pixdim scaling on import for medical files that require it #991

Open · wants to merge 7 commits into base: master
18 changes: 0 additions & 18 deletions darwin/dataset/remote_dataset.py
@@ -996,24 +996,6 @@ def import_annotation(self, item_id: ItemId, payload: Dict[str, Any]) -> None:
"""
...

@abstractmethod
def _get_remote_files_that_require_legacy_scaling(self) -> List[Path]:
"""
Get all remote files that have been scaled upon upload. These files require that
NifTI annotations are similarly scaled during import

Parameters
----------
dataset : RemoteDataset
The remote dataset to get the files from

Returns
-------
List[Path]
A list of full remote paths of dataset items that require NifTI annotations to be scaled
"""
...

@property
def remote_path(self) -> Path:
"""Returns an URL specifying the location of the remote dataset."""
46 changes: 0 additions & 46 deletions darwin/dataset/remote_dataset_v2.py
@@ -11,7 +11,6 @@
Tuple,
Union,
)
import numpy as np
from pydantic import ValidationError
from requests.models import Response

@@ -873,51 +872,6 @@ def register_multi_slotted(
print(f"Reistration complete. Check your items in the dataset: {self.slug}")
return results

def _get_remote_files_that_require_legacy_scaling(
self,
) -> Dict[str, Dict[str, Any]]:
"""
Get all remote files that have been scaled upon upload. These files require that
NifTI annotations are similarly scaled during import.

The in-platform affines are returned for each legacy file, as this is required
to properly re-orient the annotations during import.

Parameters
----------
dataset : RemoteDataset
The remote dataset to get the files from

Returns
-------
Dict[str, Dict[str, Any]]
A dictionary of remote file full paths to their slot affine maps
"""
remote_files_that_require_legacy_scaling = {}
remote_files = self.fetch_remote_files(
filters={"statuses": ["new", "annotate", "review", "complete", "archived"]}
)
for remote_file in remote_files:
if not remote_file.slots[0].get("metadata", {}).get("medical", {}):
continue
if not (
remote_file.slots[0]
.get("metadata", {})
.get("medical", {})
.get("handler")
):
slot_affine_map = {}
for slot in remote_file.slots:
slot_affine_map[slot["slot_name"]] = np.array(
slot["metadata"]["medical"]["affine"],
dtype=np.float64,
)
remote_files_that_require_legacy_scaling[
Path(remote_file.full_path)
] = slot_affine_map

return remote_files_that_require_legacy_scaling


def _find_files_to_upload_as_multi_file_items(
search_files: List[PathLike],
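For reference, the removed helper mapped each legacy item's full remote path to a per-slot affine matrix. A minimal sketch of that returned structure, with purely hypothetical paths and values:

```python
import numpy as np
from pathlib import Path

# Hypothetical illustration of the shape returned by the removed helper:
# {full remote path -> {slot name -> 4x4 affine as a float64 ndarray}}
legacy_files = {
    Path("/scans/patient_001/ct.nii.gz"): {
        "0": np.array(
            [
                [-1.0, 0.0, 0.0, 120.5],
                [0.0, -1.0, 0.0, 98.3],
                [0.0, 0.0, 1.0, -52.0],
                [0.0, 0.0, 0.0, 1.0],
            ],
            dtype=np.float64,
        ),
    },
}
```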
49 changes: 49 additions & 0 deletions darwin/datatypes.py
@@ -244,6 +244,55 @@ def get_sub(self, annotation_type: str) -> Optional[SubAnnotation]:
return sub
return None

def scale_coordinates(self, x_scale: float, y_scale: float) -> None:
"""
Multiplies the coordinates of the annotation by the given values.

Parameters
----------
x_scale : float
Scale factor for x coordinates
y_scale : float
Scale factor for y coordinates
"""
annotation_type = (
self.annotation_class.annotation_type
if hasattr(self, "annotation_class")
else None
)

if annotation_type == "bounding_box":
self.data["x"] *= x_scale
self.data["y"] *= y_scale
self.data["w"] *= x_scale
self.data["h"] *= y_scale

elif annotation_type == "polygon":
for path in self.data["paths"]:
for point in path:
point["x"] *= x_scale
point["y"] *= y_scale
Contributor:
Should we also update additional_paths?

Collaborator (Author):
I believe it's only via the API that complex polygons are represented with path and additional_paths. In exported JSON and in the parsed form here, complex polygons are represented by a list of paths in paths. For example:

{'paths': [[{'x': 1185.2913, 'y': 1229.2412}, {'x': 951.3686, 'y': 1265.935}, {'x': 882.5678, 'y': 1513.6179}, {'x': 882.5678, 'y': 1518.2046}, {'x': 1176.1179, 'y': 1545.7249}], [{'x': 410.1355, 'y': 1871.3821}, {'x': 589.0176, 'y': 967.7981}, {'x': 1474.2547, 'y': 990.7317}, {'x': 1836.6057, 'y': 1949.3564}]], 'bounding_box': {'x': 410.1355, 'y': 967.7981, 'w': 1426.4702000000002, 'h': 981.5582999999999}}


elif annotation_type == "ellipse":
self.data["center"]["x"] *= x_scale
self.data["center"]["y"] *= y_scale
self.data["radius"]["x"] *= x_scale
self.data["radius"]["y"] *= y_scale

elif annotation_type == "line":
for point in self.data["path"]:
point["x"] *= x_scale
point["y"] *= y_scale

elif annotation_type == "keypoint":
self.data["x"] *= x_scale
self.data["y"] *= y_scale

elif annotation_type == "skeleton":
for node in self.data["nodes"]:
node["x"] *= x_scale
node["y"] *= y_scale


@dataclass(frozen=False, eq=True)
class VideoAnnotation:
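Picking up the thread above about complex polygons: because scale_coordinates iterates every sub-path in paths, multi-path polygons are scaled in full. A minimal usage sketch against this branch, assuming the Annotation and AnnotationClass dataclass constructors and using hypothetical coordinates:

```python
from darwin.datatypes import Annotation, AnnotationClass

# Complex polygon with two sub-paths (hypothetical values); both get scaled.
polygon = Annotation(
    annotation_class=AnnotationClass(name="lesion", annotation_type="polygon"),
    data={
        "paths": [
            [{"x": 10.0, "y": 20.0}, {"x": 30.0, "y": 40.0}, {"x": 10.0, "y": 40.0}],
            [{"x": 100.0, "y": 200.0}, {"x": 120.0, "y": 220.0}, {"x": 100.0, "y": 220.0}],
        ]
    },
    subs=[],
)

# e.g. pixdim-derived scale factors for a medical file that requires legacy scaling
polygon.scale_coordinates(x_scale=0.5, y_scale=0.25)

print(polygon.data["paths"][0][0])  # {'x': 5.0, 'y': 5.0}
```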