Skip to content

Commit

Permalink
chore: redesign last minute fixes (#3792)
Browse files — browse the repository at this point in the history
* fixes

* more fixes

* lint
  • Loading branch information
ebezzi authored Dec 19, 2022
1 parent 0eb3314 commit ee9b967
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 18 deletions.
35 changes: 24 additions & 11 deletions backend/layers/persistence/persistence.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,8 +278,23 @@ def get_all_collections_versions(self) -> Iterable[CollectionVersion]:
"""
with self._manage_session() as session:
versions = session.query(CollectionVersionTable).all()
# TODO: do we need to hydrate versions with canonical collections? would require a join or many lookup calls
return [self._row_to_collection_version(v, None) for v in versions]

# Create a canonical mapping
all_canonical_collections = session.query(CollectionTable).all()
all_canonical_map = dict()
for collection_row in all_canonical_collections:
all_canonical_map[str(collection_row.id)] = CanonicalCollection(
CollectionId(str(collection_row.id)),
None if collection_row.version_id is None else CollectionVersionId(str(collection_row.version_id)),
collection_row.originally_published_at,
collection_row.tombstoned,
)

result = []
for v in versions:
result.append(self._row_to_collection_version(v, all_canonical_map[str(v.collection_id)]))

return result

def get_all_mapped_collection_versions(self, get_tombstoned: bool = False) -> Iterable[CollectionVersion]:
"""
Expand Down Expand Up @@ -314,7 +329,6 @@ def get_all_mapped_collection_versions(self, get_tombstoned: bool = False) -> It
canonical_row.originally_published_at,
canonical_row.tombstoned,
)

yield self._row_to_collection_version(version, canonical)

def delete_canonical_collection(self, collection_id: CollectionId) -> None:
Expand Down Expand Up @@ -497,7 +511,6 @@ def get_dataset_artifacts_by_version_id(self, dataset_version_id: DatasetVersion
artifact_ids = (
session.query(DatasetVersionTable.artifacts).filter_by(version_id=dataset_version_id.id).one()
)
print("zzz", artifact_ids)
return self.get_dataset_artifacts(artifact_ids[0])

def create_canonical_dataset(self, collection_version_id: CollectionVersionId) -> DatasetVersion:
Expand Down Expand Up @@ -627,10 +640,8 @@ def add_dataset_to_collection_version_mapping(
# TODO: alternatively use postgres `array_append`
# TODO: make sure that the UUID conversion works
updated_datasets = list(collection_version.datasets)
# print("before", updated_datasets)
updated_datasets.append(uuid.UUID(dataset_version_id.id))
collection_version.datasets = updated_datasets
# print("after", updated_datasets)

def delete_dataset_from_collection_version(
self, collection_version_id: CollectionVersionId, dataset_version_id: DatasetVersionId
Expand Down Expand Up @@ -691,11 +702,13 @@ def get_dataset_mapped_version(self, dataset_id: DatasetId) -> Optional[DatasetV
Returns the dataset version mapped to a canonical dataset_id, or None if not existing
"""
with self._manage_session() as session:
canonical_dataset = session.query(DatasetTable).filter_by(dataset_id=dataset_id).one()
if not canonical_dataset.version_id:
canonical_dataset = session.query(DatasetTable).filter_by(dataset_id=dataset_id.id).one_or_none()
if canonical_dataset is None:
return None
if canonical_dataset.dataset_version_id is None:
return None
dataset_version = (
session.query(DatasetVersionTable).filter_by(version_id=canonical_dataset.version_id).one()
session.query(DatasetVersionTable).filter_by(version_id=canonical_dataset.dataset_version_id).one()
)
dataset_version.canonical_dataset = canonical_dataset
return self._hydrate_dataset_version(dataset_version)
dataset_version.canonical_dataset = canonical_dataset
return self._hydrate_dataset_version(dataset_version)
16 changes: 9 additions & 7 deletions tests/unit/backend/layers/api/test_curation_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,7 +489,6 @@ def _test(owner):
resp = self.app.get("/curation/v1/collections", query_string=params, headers=header)
self.assertEqual(200, resp.status_code)
body = resp.json
print("BODY", body)
self.assertEqual(1, len(body))
resp_collection = body[0]

Expand Down Expand Up @@ -650,7 +649,8 @@ def test__get_collection_with_dataset_failing_pipeline(self):
self.assertEqual("PIPELINE_FAILURE", actual_dataset["processing_status"])

def test__get_nonexistent_collection__403(self):
res = self.app.get("/curation/v1/collections/test_collection_id_nonexistent")
non_existent_id = str(uuid.uuid4())
res = self.app.get(f"/curation/v1/collections/{non_existent_id}")
self.assertEqual(403, res.status_code)

def test__get_tombstoned_collection__403(self):
Expand Down Expand Up @@ -951,15 +951,13 @@ def test_get_dataset_in_a_collection_200(self):
test_url = f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_version_id}"

response = self.app.get(test_url)
print(response)
self.assertEqual(200, response.status_code)
self.assertEqual(dataset.dataset_id, response.json["id"])

def test_get_dataset_shape(self):
dataset = self.generate_dataset(name="test")
test_url = f"/curation/v1/collections/{dataset.collection_id}/datasets/{dataset.dataset_version_id}"
response = self.app.get(test_url)
print(response.json)
self.assertEqual("test", response.json["title"])

def test_get_dataset_is_primary_data_shape(self):
Expand All @@ -979,20 +977,24 @@ def test_get_dataset_is_primary_data_shape(self):

def test_get_nonexistent_dataset_404(self):
collection = self.generate_unpublished_collection()
test_url = f"/curation/v1/collections/{collection.collection_id}/datasets/1234-1243-2134-234-1342"
non_existent_dataset_id = str(uuid.uuid4())
test_url = f"/curation/v1/collections/{collection.collection_id}/datasets/{non_existent_dataset_id}"
response = self.app.get(test_url)
self.assertEqual(404, response.status_code)

def test_get_datasets_nonexistent_collection_404(self):
test_url = "/curation/v1/collections/nonexistent/datasets/1234-1243-2134-234-1342"
non_existent_dataset_id = str(uuid.uuid4())
non_existent_collection_id = str(uuid.uuid4())
test_url = f"/curation/v1/collections/{non_existent_collection_id}/datasets/{non_existent_dataset_id}"
headers = self.make_owner_header()
response = self.app.get(test_url, headers=headers)
self.assertEqual(404, response.status_code)


class TestPostDataset(BaseAPIPortalTest):
def test_post_datasets_nonexistent_collection_403(self):
test_url = "/curation/v1/collections/nonexistent/datasets"
non_existent_collection_id = str(uuid.uuid4())
test_url = f"/curation/v1/collections/{non_existent_collection_id}/datasets"
headers = self.make_owner_header()
response = self.app.post(test_url, headers=headers)
self.assertEqual(403, response.status_code)
Expand Down

0 comments on commit ee9b967

Please sign in to comment.