diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..daef88c --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,113 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = src/backend/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can be installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to src/backend/migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. 
+# version_locations = %(here)s/bar:%(here)s/bat:src/backend/migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +hooks = black ruff +black.type = console_scripts +black.entrypoint = black +black.options = REVISION_SCRIPT_FILENAME +ruff.type = exec +ruff.executable = ruff +ruff.options = format REVISION_SCRIPT_FILENAME + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/pyproject.toml b/backend/pyproject.toml index a6a73dc..5f3855f 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -182,6 +182,8 @@ ignore = [ "S603", # Ignore complexity "C901", "PLR0911", "PLR0912", "PLR0913", "PLR0915", + # Allow use of typing.Optional, typing.List for type annotations + "UP007", "UP035", "UP006" ] unfixable = [ # Don't touch unused imports diff --git a/backend/src/backend/db/__init__.py b/backend/src/backend/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/src/backend/db/models.py b/backend/src/backend/db/models.py new file mode 100644 index 0000000..2044677 --- /dev/null +++ b/backend/src/backend/db/models.py @@ -0,0 +1,129 @@ +from datetime import datetime +from ipaddress import IPv4Address +from typing import List, Optional +from uuid import UUID + +from sqlalchemy import DateTime, Enum, ForeignKey, UniqueConstraint, text +from sqlalchemy.dialects.postgresql import ARRAY, CITEXT, INET +from sqlalchemy.orm import ( + DeclarativeBase, + Mapped, + MappedAsDataclass, + mapped_column, + relationship, 
+) +from sqlalchemy.sql.schema import MetaData + +from backend.enums import StatusEnum + + +class Base(MappedAsDataclass, DeclarativeBase): + # This map details the specific transformation of types between Python and + # PostgreSQL. This is only needed for the case where a specific PostgreSQL + # type has to be used. + + type_annotation_map = { # noqa: RUF012 + str: CITEXT, # transform Python str to PostgreSQL CITEXT + List[str]: ARRAY( + item_type=CITEXT + ), # transform Python List[str] into PostgreSQL Array of strings + datetime: DateTime( + timezone=False + ), # transform Python datetime into PostgreSQL Datetime without timezone + IPv4Address: INET, # transform Python IPV4Address into PostgreSQL INET + } + + # This metadata specifies some naming conventions that will be used by + # alembic to generate constraints names (indexes, unique constraints, ...) + metadata = MetaData( + naming_convention={ + "ix": "ix_%(column_0_label)s", + "uq": "uq_%(table_name)s_%(column_0_name)s", + "ck": "ck_%(table_name)s_%(constraint_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s", + } + ) + pass + + +class Country(Base): + __tablename__ = "country" + + code: Mapped[str] = mapped_column(primary_key=True) + name: Mapped[str] + + worker_id: Mapped[Optional[UUID]] = mapped_column( + ForeignKey("worker.id"), init=False + ) + worker: Mapped[Optional["Worker"]] = relationship( + back_populates="countries", init=False + ) + mirrors: Mapped[List["Mirror"]] = relationship( + back_populates="country", + init=False, + cascade="all, delete-orphan", + ) + + __table_args__ = (UniqueConstraint("name", "code"),) + + +class Mirror(Base): + __tablename__ = "mirror" + + base_url: Mapped[str] = mapped_column(primary_key=True) + enabled: Mapped[bool] + # metadata of a mirror from MirrorBrain (https://mirrorbrain-docs.readthedocs.io/en/latest/mirrors.html#displaying-details-about-a-mirror) + id: Mapped[Optional[str]] + region: 
Mapped[Optional[str]] + asn: Mapped[Optional[str]] + score: Mapped[Optional[int]] + latitude: Mapped[Optional[float]] + longitude: Mapped[Optional[float]] + country_only: Mapped[Optional[bool]] + region_only: Mapped[Optional[bool]] + as_only: Mapped[Optional[bool]] + other_countries: Mapped[Optional[List[str]]] + + country_code: Mapped[str] = mapped_column( + ForeignKey("country.code"), + init=False, + ) + country: Mapped["Country"] = relationship(back_populates="mirrors", init=False) + + +class Worker(Base): + __tablename__ = "worker" + id: Mapped[UUID] = mapped_column( + init=False, primary_key=True, server_default=text("uuid_generate_v4()") + ) + auth_info: Mapped[str] + last_seen: Mapped[Optional[datetime]] + countries: Mapped[List["Country"]] = relationship( + back_populates="worker", init=False + ) + + +class Test(Base): + __tablename__ = "test" + id: Mapped[UUID] = mapped_column( + init=False, primary_key=True, server_default=text("uuid_generate_v4()") + ) + requested_on: Mapped[datetime] + started_on: Mapped[Optional[datetime]] + status: Mapped[Optional[StatusEnum]] = mapped_column( + Enum( + native_enum=False, + validate_strings=True, + create_constraint=True, + name="status", + ) + ) + error: Mapped[Optional[str]] + ip_address: Mapped[Optional[IPv4Address]] + asn: Mapped[Optional[str]] + location: Mapped[Optional[str]] + latency: Mapped[Optional[int]] # milliseconds + download_size: Mapped[Optional[int]] # bytes + duration: Mapped[Optional[int]] # seconds + speed: Mapped[Optional[float]] # bytes per second diff --git a/backend/src/backend/enums.py b/backend/src/backend/enums.py new file mode 100644 index 0000000..736658b --- /dev/null +++ b/backend/src/backend/enums.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class StatusEnum(Enum): + MISSED = 0 + SUCCEEDED = 1 + ERRORED = 2 diff --git a/backend/src/backend/migrations/env.py b/backend/src/backend/migrations/env.py new file mode 100644 index 0000000..634c406 --- /dev/null +++ 
b/backend/src/backend/migrations/env.py @@ -0,0 +1,73 @@ +import os +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import create_engine + +from backend.db.models import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = os.getenv("POSTGRES_URI", "") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = create_engine(os.getenv("POSTGRES_URI", ""), echo=False) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/src/backend/migrations/script.py.mako b/backend/src/backend/migrations/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/backend/src/backend/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/src/backend/migrations/versions/d45beab913d1_set_up_database_models.py b/backend/src/backend/migrations/versions/d45beab913d1_set_up_database_models.py new file mode 100644 index 0000000..6e745d5 --- /dev/null +++ b/backend/src/backend/migrations/versions/d45beab913d1_set_up_database_models.py @@ -0,0 +1,105 @@ +"""set up database models + +Revision ID: d45beab913d1 +Revises: +Create Date: 2024-06-03 07:46:35.661038 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = "d45beab913d1" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "test", + sa.Column( + "id", + sa.Uuid(), + server_default=sa.text("uuid_generate_v4()"), + nullable=False, + ), + sa.Column("requested_on", sa.DateTime(), nullable=False), + sa.Column("started_on", sa.DateTime(), nullable=True), + sa.Column( + "status", + sa.Enum(name="status", native_enum=False, create_constraint=True), + nullable=True, + ), + sa.Column("error", postgresql.CITEXT(), nullable=True), + sa.Column("ip_address", postgresql.INET(), nullable=True), + sa.Column("asn", postgresql.CITEXT(), nullable=True), + sa.Column("location", postgresql.CITEXT(), nullable=True), + sa.Column("latency", sa.Integer(), nullable=True), + sa.Column("download_size", sa.Integer(), nullable=True), + sa.Column("duration", sa.Integer(), nullable=True), + sa.Column("speed", sa.Float(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_test")), + ) + op.create_table( + "worker", + sa.Column( + "id", + sa.Uuid(), + server_default=sa.text("uuid_generate_v4()"), + nullable=False, + ), + sa.Column("auth_info", postgresql.CITEXT(), nullable=False), + sa.Column("last_seen", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", name=op.f("pk_worker")), + ) + op.create_table( + "country", + sa.Column("code", postgresql.CITEXT(), nullable=False), + sa.Column("name", postgresql.CITEXT(), nullable=False), + sa.Column("worker_id", sa.Uuid(), nullable=True), + sa.ForeignKeyConstraint( + ["worker_id"], ["worker.id"], name=op.f("fk_country_worker_id_worker") + ), + sa.PrimaryKeyConstraint("code", name=op.f("pk_country")), + sa.UniqueConstraint("name", "code", name=op.f("uq_country_name")), + ) + op.create_table( + "mirror", + sa.Column("base_url", postgresql.CITEXT(), nullable=False), + 
sa.Column("enabled", sa.Boolean(), nullable=False), + sa.Column("id", postgresql.CITEXT(), nullable=True), + sa.Column("region", postgresql.CITEXT(), nullable=True), + sa.Column("asn", postgresql.CITEXT(), nullable=True), + sa.Column("score", sa.Integer(), nullable=True), + sa.Column("latitude", sa.Float(), nullable=True), + sa.Column("longitude", sa.Float(), nullable=True), + sa.Column("country_only", sa.Boolean(), nullable=True), + sa.Column("region_only", sa.Boolean(), nullable=True), + sa.Column("as_only", sa.Boolean(), nullable=True), + sa.Column( + "other_countries", postgresql.ARRAY(postgresql.CITEXT()), nullable=True + ), + sa.Column("country_code", postgresql.CITEXT(), nullable=False), + sa.ForeignKeyConstraint( + ["country_code"], + ["country.code"], + name=op.f("fk_mirror_country_code_country"), + ), + sa.PrimaryKeyConstraint("base_url", name=op.f("pk_mirror")), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("mirror") + op.drop_table("country") + op.drop_table("worker") + op.drop_table("test") + # ### end Alembic commands ###