Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor: persistence layer clean-up #3835

Merged
merged 28 commits into from
Jan 10, 2023
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
e3303dd
refactor: persistence layer clean-up
nayib-jose-gloria Dec 23, 2022
e21c59f
lint
nayib-jose-gloria Dec 23, 2022
04668e6
update missing id changes
nayib-jose-gloria Dec 23, 2022
3b2f18d
lint fix
nayib-jose-gloria Dec 23, 2022
04c4dc1
more missing id's
nayib-jose-gloria Dec 23, 2022
ad3a3c5
Merge branch 'main' into nayib/persistence-layer-clean-up
nayib-jose-gloria Dec 23, 2022
6ba491e
remove unused import
nayib-jose-gloria Dec 23, 2022
4332f63
correct id name
nayib-jose-gloria Dec 23, 2022
bc0cceb
Merge branch 'nayib/persistence-layer-clean-up' of https://github.com…
nayib-jose-gloria Dec 23, 2022
b6184b9
lint fix
nayib-jose-gloria Dec 23, 2022
006d4ff
id fix
nayib-jose-gloria Dec 23, 2022
d8e4f17
id fix
nayib-jose-gloria Dec 23, 2022
719705b
more ids
nayib-jose-gloria Dec 23, 2022
b938f1f
id fix
nayib-jose-gloria Dec 23, 2022
60c2c07
version id fix
nayib-jose-gloria Dec 23, 2022
e7f2e94
lint fix
nayib-jose-gloria Dec 23, 2022
b8a1dbb
Merge branch 'main' of https://github.com/chanzuckerberg/single-cell-…
nayib-jose-gloria Jan 9, 2023
26cf547
autogenerate migration script for db field changes + update README links
nayib-jose-gloria Jan 9, 2023
3f198ab
update makefile test command documentation
nayib-jose-gloria Jan 9, 2023
f9a8f95
pr feedback on docs
nayib-jose-gloria Jan 9, 2023
f601d68
update migration script + db/local/load-schema + migration docs
nayib-jose-gloria Jan 9, 2023
5ca9ef6
lint fixes
nayib-jose-gloria Jan 10, 2023
eef29f0
remove unused imports
nayib-jose-gloria Jan 10, 2023
df789da
Merge branch 'main' into nayib/persistence-layer-clean-up
nayib-jose-gloria Jan 10, 2023
96d515c
Merge branch 'main' into nayib/persistence-layer-clean-up
nayib-jose-gloria Jan 10, 2023
4872b0a
tabs
nayib-jose-gloria Jan 10, 2023
5972945
Merge branch 'nayib/persistence-layer-clean-up' of https://github.com…
nayib-jose-gloria Jan 10, 2023
31218bc
tabs
nayib-jose-gloria Jan 10, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions DEV_ENV.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,12 @@ The dev environment is initialized with AWS Secrets/S3 data in the [scripts/setu

### Make targets for running tests in dev

| Command | Description | Notes |
| ---------------------------- | --------------------------------------------- | ----- |
| `make local-unit-test` | Run backend tests in the dev environment | |
| `make local-functional-test` | Run functional tests in the dev environment | |
| `make local-smoke-test` | Run frontend/e2e tests in the dev environment | |
| Command | Description | Notes |
|------------------------------|----------------------------------------------------------------------------------------------| ----- |
| `make local-unit-test`       | Run backend tests in the local dockerized environment, against a mock of the persistence layer | |
| `make local-integration-test` | Run backend tests in the local dockerized environment, against a dockerized database instance | |
| `make local-functional-test` | Run functional tests in the local dockerized environment | |
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Run [backend] functional tests?

| `make local-smoke-test` | Run frontend/e2e tests in the local dockerized environment | |

### External dependencies

Expand Down
39 changes: 16 additions & 23 deletions backend/layers/common/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
from enum import Enum
from typing import List, Optional

import uuid

from dataclasses_json import dataclass_json


Expand Down Expand Up @@ -98,43 +100,34 @@ def empty():


# NOTE(review): unified-diff rendering from backend/layers/common/entities.py —
# removed lines (the old per-type id dataclasses) are interleaved with the added
# EntityId base class and an inline PR review comment; not runnable as-is.
@dataclass
class CollectionId:
class EntityId:
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Great idea. I initially went for the same approach but I discarded it (but I can't remember the reason - maybe something with dataclasses?). Anyway if it works this is certainly great.

id: str

def __repr__(self) -> str:
return self.id


@dataclass
class CollectionVersionId:
id: str
# NOTE(review): hand-written __init__ on a @dataclass — mints a fresh uuid4
# string when no id is supplied, otherwise stringifies the given value.
# "entity_id: str = None" is an implicit Optional; should be Optional[str]
# per PEP 484 — TODO confirm in the merged file.
def __init__(self, entity_id: str = None):
self.id = str(entity_id) if entity_id is not None else str(uuid.uuid4())

def __repr__(self) -> str:
return self.id


# NOTE(review): diff rendering — the old one-off id dataclasses (DatasetId,
# DatasetVersionId, DatasetArtifactId, each repeating "id: str" + __repr__)
# are replaced by thin subclasses of EntityId; the "pass" bodies inherit id
# generation and __repr__ from the base class.
@dataclass
class DatasetId:
id: str
class CollectionId(EntityId):
pass

def __repr__(self) -> str:
return self.id

class CollectionVersionId(EntityId):
pass

@dataclass
class DatasetVersionId:
id: str

def __repr__(self) -> str:
return self.id
class DatasetId(EntityId):
pass


@dataclass
class DatasetArtifactId:
id: str
class DatasetVersionId(EntityId):
pass

def __repr__(self) -> str:
return self.id

class DatasetArtifactId(EntityId):
pass


@dataclass
Expand Down
98 changes: 44 additions & 54 deletions backend/layers/persistence/orm.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String, Table
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
from sqlalchemy.dialects.postgresql import ARRAY, BOOLEAN, JSON, UUID
from sqlalchemy.orm import registry
from sqlalchemy.schema import MetaData
Expand All @@ -10,71 +10,61 @@


# NOTE(review): diff from backend/layers/persistence/orm.py — the imperative
# __table__ = Table(...) mapping is replaced by declarative __tablename__ +
# class-attribute Columns; the class is also renamed Collection -> CollectionTable.
@mapper_registry.mapped
class Collection:
class CollectionTable:

__table__ = Table(
"Collection",
mapper_registry.metadata,
Column("id", UUID(as_uuid=True), primary_key=True),
Column("version_id", UUID(as_uuid=True)),
Column("originally_published_at", DateTime),
Column("tombstoned", BOOLEAN),
)
__tablename__ = "Collection"

id = Column(UUID(as_uuid=True), primary_key=True)
version_id = Column(UUID(as_uuid=True))
originally_published_at = Column(DateTime)
# NOTE(review): column renamed "tombstoned" -> "tombstone" — presumably covered
# by the autogenerated migration script mentioned in the commit log; verify.
tombstone = Column(BOOLEAN)


# NOTE(review): diff — CollectionVersion -> CollectionVersionTable, Table(...)
# replaced by declarative columns. Primary key renamed "version_id" -> "id",
# and "metadata" -> "collection_metadata" (the name "metadata" collides with
# SQLAlchemy's declarative-class metadata attribute — TODO confirm intent).
@mapper_registry.mapped
class CollectionVersion:

__table__ = Table(
"CollectionVersion",
mapper_registry.metadata,
Column("version_id", UUID(as_uuid=True), primary_key=True),
Column("collection_id", UUID(as_uuid=True)),
Column("metadata", JSON),
Column("owner", String),
Column("curator_name", String),
Column("publisher_metadata", JSON),
Column("published_at", DateTime),
Column("created_at", DateTime),
Column("datasets", ARRAY(UUID(as_uuid=True))),
)
class CollectionVersionTable:

__tablename__ = "CollectionVersion"

id = Column(UUID(as_uuid=True), primary_key=True)
collection_id = Column(UUID(as_uuid=True))
collection_metadata = Column(JSON)
owner = Column(String)
curator_name = Column(String)
publisher_metadata = Column(JSON)
published_at = Column(DateTime)
created_at = Column(DateTime)
datasets = Column(ARRAY(UUID(as_uuid=True)))


# NOTE(review): diff — Dataset -> DatasetTable; columns renamed
# "dataset_id" -> "id" (primary key) and "dataset_version_id" -> "version_id",
# dropping the table-name prefix from column names.
@mapper_registry.mapped
class Dataset:
class DatasetTable:

__tablename__ = "Dataset"

__table__ = Table(
"Dataset",
mapper_registry.metadata,
Column("dataset_id", UUID(as_uuid=True), primary_key=True),
Column("dataset_version_id", UUID(as_uuid=True)),
Column("published_at", DateTime),
)
id = Column(UUID(as_uuid=True), primary_key=True)
version_id = Column(UUID(as_uuid=True))
published_at = Column(DateTime)


# NOTE(review): diff — DatasetVersion -> DatasetVersionTable. Primary key
# "version_id" -> "id"; the ForeignKey target changes "Dataset.dataset_id" ->
# "Dataset.id" to track the rename in the Dataset table; "metadata" ->
# "dataset_metadata" (avoids SQLAlchemy's reserved declarative attribute).
@mapper_registry.mapped
class DatasetVersion:
class DatasetVersionTable:

__table__ = Table(
"DatasetVersion",
mapper_registry.metadata,
Column("version_id", UUID(as_uuid=True), primary_key=True),
Column("dataset_id", UUID(as_uuid=True), ForeignKey("Dataset.dataset_id")),
Column("collection_id", UUID(as_uuid=True)),
Column("created_at", DateTime),
Column("metadata", JSON),
Column("artifacts", ARRAY(UUID(as_uuid=True))),
Column("status", JSON),
)
__tablename__ = "DatasetVersion"

id = Column(UUID(as_uuid=True), primary_key=True)
dataset_id = Column(UUID(as_uuid=True), ForeignKey("Dataset.id"))
collection_id = Column(UUID(as_uuid=True))
created_at = Column(DateTime)
dataset_metadata = Column(JSON)
artifacts = Column(ARRAY(UUID(as_uuid=True)))
status = Column(JSON)


# NOTE(review): diff — DatasetArtifact -> DatasetArtifactTable, same
# Table(...) -> declarative conversion; column names unchanged.
@mapper_registry.mapped
class DatasetArtifact:

__table__ = Table(
"DatasetArtifact",
mapper_registry.metadata,
Column("id", UUID(as_uuid=True), primary_key=True),
Column("type", Enum(DatasetArtifactType)),
Column("uri", String),
)
class DatasetArtifactTable:

__tablename__ = "DatasetArtifact"

id = Column(UUID(as_uuid=True), primary_key=True)
# NOTE(review): attribute name "type" shadows the builtin, but as a mapped
# column name this mirrors the existing DB schema — leave as-is.
type = Column(Enum(DatasetArtifactType))
uri = Column(String)
Loading