chore: set tools line-length to 88 characters
jfcalvo committed Jul 18, 2024
1 parent 20ae663 commit 9dba7ef
Showing 181 changed files with 7,945 additions and 2,255 deletions.
4 changes: 2 additions & 2 deletions argilla-server/pyproject.toml
@@ -155,14 +155,14 @@ exclude = [
    "node_modules",
    "venv",
]
-line-length = 120
+line-length = 88

[tool.ruff.per-file-ignores]
# Ignore imported but unused;
"__init__.py" = ["F401"]

[tool.black]
-line-length = 120
+line-length = 88

[tool.pdm.scripts]
_.env_file = ".env.dev"
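These two `line-length` edits are the substance of the commit; the 181 changed files below are the formatters' mechanical output after rerunning them with the new limit (88 is black's default). As a hedged illustration, not part of the commit, black's documented Python API (`black.format_str`, `black.Mode`) reproduces one of the rewraps below:

# Hedged sketch: black's API applied to a line from the __init__.py diff below.
import black

SRC = (
    'warnings.warn("The argilla_server package is not compatible with Pydantic 2. "'
    ' "Please use Pydantic 1.x instead.")\n'
)

# Fits within the old 120-character limit, so black keeps it on one line:
print(black.format_str(SRC, mode=black.Mode(line_length=120)))

# Exceeds the new 88-character limit, so black wraps it as in the diff below:
print(black.format_str(SRC, mode=black.Mode(line_length=88)))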
5 changes: 4 additions & 1 deletion argilla-server/src/argilla_server/__init__.py
@@ -17,6 +17,9 @@
from argilla_server.pydantic_v1 import PYDANTIC_MAJOR_VERSION

if PYDANTIC_MAJOR_VERSION >= 2:
-    warnings.warn("The argilla_server package is not compatible with Pydantic 2. " "Please use Pydantic 1.x instead.")
+    warnings.warn(
+        "The argilla_server package is not compatible with Pydantic 2. "
+        "Please use Pydantic 1.x instead."
+    )
else:
    from argilla_server._app import app  # noqa
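For reference, `PYDANTIC_MAJOR_VERSION` gates the entire package on the installed Pydantic major version. A minimal sketch of how such a constant can be derived, assuming only that pydantic exposes its version string as `pydantic.VERSION` (true for both 1.x and 2.x) — this mirrors the idea, not the actual `argilla_server.pydantic_v1` source:

# Hedged sketch: deriving a major-version gate from pydantic's version string.
import pydantic

PYDANTIC_MAJOR_VERSION = int(pydantic.VERSION.split(".")[0])

if PYDANTIC_MAJOR_VERSION >= 2:
    # argilla_server would warn here and skip importing the app.
    print("Pydantic 2.x detected")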
14 changes: 11 additions & 3 deletions argilla-server/src/argilla_server/_app.py
@@ -79,7 +79,9 @@ async def redirect_api():
    # This if-else clause is needed to simplify the test dependencies setup. Otherwise we cannot override dependencies
    # easily. We can review this once we have separate fastapi application for the api and the webapp.
    if settings.base_url and settings.base_url != "/":
-        _app = FastAPI(docs_url=None, redoc_url=None, openapi_url=None, redirect_slashes=False)
+        _app = FastAPI(
+            docs_url=None, redoc_url=None, openapi_url=None, redirect_slashes=False
+        )
        _app.mount(settings.base_url, app)
        return _app
    else:
@@ -212,7 +214,11 @@ def configure_telemetry(app: FastAPI):
        """
    )
    message += "\n\n "
-    message += "#set ARGILLA_ENABLE_TELEMETRY=0" if os.name == "nt" else "$>export ARGILLA_ENABLE_TELEMETRY=0"
+    message += (
+        "#set ARGILLA_ENABLE_TELEMETRY=0"
+        if os.name == "nt"
+        else "$>export ARGILLA_ENABLE_TELEMETRY=0"
+    )
    message += "\n"

    @app.on_event("startup")
@@ -226,7 +232,9 @@ async def check_telemetry():

def configure_database(app: FastAPI):
    def _user_has_default_credentials(user: User):
-        return user.api_key == DEFAULT_API_KEY or accounts.verify_password(DEFAULT_PASSWORD, user.password_hash)
+        return user.api_key == DEFAULT_API_KEY or accounts.verify_password(
+            DEFAULT_PASSWORD, user.password_hash
+        )

    def _log_default_user_warning():
        _LOGGER.warning(
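The `_app` wrapper above implements FastAPI's sub-application pattern: a bare outer app whose only job is to serve the real app under `settings.base_url`. A self-contained hedged sketch (names and the `/argilla` prefix are illustrative, not from the repo):

# Hedged sketch: mounting an app under a base path, as _app.py does above.
from fastapi import FastAPI

inner = FastAPI()

@inner.get("/status")
def status():
    return {"ok": True}

# Docs and schema are disabled on the wrapper so only the inner app serves them.
wrapper = FastAPI(
    docs_url=None, redoc_url=None, openapi_url=None, redirect_slashes=False
)
wrapper.mount("/argilla", inner)  # GET /argilla/status now reaches the inner app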
@@ -35,9 +35,19 @@ def upgrade() -> None:
        "workspaces_users",
        sa.Column("id", sa.Uuid, primary_key=True),
        sa.Column(
-            "workspace_id", sa.Uuid, sa.ForeignKey("workspaces.id", ondelete="CASCADE"), nullable=False, index=True
+            "workspace_id",
+            sa.Uuid,
+            sa.ForeignKey("workspaces.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
        ),
-        sa.Column("user_id", sa.Uuid, sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True),
+        sa.Column(
+            "user_id",
+            sa.Uuid,
+            sa.ForeignKey("users.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
        sa.Column("inserted_at", sa.DateTime, nullable=False),
        sa.Column("updated_at", sa.DateTime, nullable=False),
        sa.UniqueConstraint("workspace_id", "user_id", name="workspace_id_user_id_uq"),
@@ -37,7 +37,12 @@
def upgrade() -> None:
    record_status_enum.create(op.get_bind())

-    op.add_column("records", sa.Column("status", record_status_enum, server_default="pending", nullable=False))
+    op.add_column(
+        "records",
+        sa.Column(
+            "status", record_status_enum, server_default="pending", nullable=False
+        ),
+    )
    op.create_index(op.f("ix_records_status"), "records", ["status"], unique=False)

    # NOTE: Updating existent records to have "completed" status when they have
@@ -38,9 +38,17 @@ def upgrade() -> None:
        sa.Column("name", sa.String, nullable=False, index=True),
        sa.Column("title", sa.Text, nullable=False),
        sa.Column("description", sa.Text),
-        sa.Column("required", sa.Boolean, nullable=False, server_default=expression.false()),
+        sa.Column(
+            "required", sa.Boolean, nullable=False, server_default=expression.false()
+        ),
        sa.Column("settings", sa.JSON, nullable=False),
-        sa.Column("dataset_id", sa.Uuid, sa.ForeignKey("datasets.id", ondelete="CASCADE"), nullable=False, index=True),
+        sa.Column(
+            "dataset_id",
+            sa.Uuid,
+            sa.ForeignKey("datasets.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
        sa.Column("inserted_at", sa.DateTime, nullable=False),
        sa.Column("updated_at", sa.DateTime, nullable=False),
        sa.UniqueConstraint("name", "dataset_id", name="question_name_dataset_id_uq"),
@@ -48,10 +48,16 @@ def upgrade() -> None:
        sa.ForeignKeyConstraint(["question_id"], ["questions.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["record_id"], ["records.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
-        sa.UniqueConstraint("record_id", "question_id", name="suggestion_record_id_question_id_uq"),
+        sa.UniqueConstraint(
+            "record_id", "question_id", name="suggestion_record_id_question_id_uq"
+        ),
    )
-    op.create_index(op.f("ix_suggestions_question_id"), "suggestions", ["question_id"], unique=False)
-    op.create_index(op.f("ix_suggestions_record_id"), "suggestions", ["record_id"], unique=False)
+    op.create_index(
+        op.f("ix_suggestions_question_id"), "suggestions", ["question_id"], unique=False
+    )
+    op.create_index(
+        op.f("ix_suggestions_record_id"), "suggestions", ["record_id"], unique=False
+    )
    op.create_index(op.f("ix_suggestions_type"), "suggestions", ["type"], unique=False)
    # ### end Alembic commands ###
@@ -43,10 +43,19 @@ def upgrade() -> None:
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["dataset_id"], ["datasets.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
-        sa.UniqueConstraint("name", "dataset_id", name="vector_settings_name_dataset_id_uq"),
+        sa.UniqueConstraint(
+            "name", "dataset_id", name="vector_settings_name_dataset_id_uq"
+        ),
    )
-    op.create_index(op.f("ix_vectors_settings_dataset_id"), "vectors_settings", ["dataset_id"], unique=False)
-    op.create_index(op.f("ix_vectors_settings_name"), "vectors_settings", ["name"], unique=False)
+    op.create_index(
+        op.f("ix_vectors_settings_dataset_id"),
+        "vectors_settings",
+        ["dataset_id"],
+        unique=False,
+    )
+    op.create_index(
+        op.f("ix_vectors_settings_name"), "vectors_settings", ["name"], unique=False
+    )
    # ### end Alembic commands ###
@@ -44,16 +44,30 @@ def upgrade() -> None:
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["dataset_id"], ["datasets.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
-        sa.UniqueConstraint("name", "dataset_id", name="metadata_property_name_dataset_id_uq"),
+        sa.UniqueConstraint(
+            "name", "dataset_id", name="metadata_property_name_dataset_id_uq"
+        ),
    )
-    op.create_index(op.f("ix_metadata_properties_dataset_id"), "metadata_properties", ["dataset_id"], unique=False)
-    op.create_index(op.f("ix_metadata_properties_name"), "metadata_properties", ["name"], unique=False)
+    op.create_index(
+        op.f("ix_metadata_properties_dataset_id"),
+        "metadata_properties",
+        ["dataset_id"],
+        unique=False,
+    )
+    op.create_index(
+        op.f("ix_metadata_properties_name"),
+        "metadata_properties",
+        ["name"],
+        unique=False,
+    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_metadata_properties_name"), table_name="metadata_properties")
-    op.drop_index(op.f("ix_metadata_properties_dataset_id"), table_name="metadata_properties")
+    op.drop_index(
+        op.f("ix_metadata_properties_dataset_id"), table_name="metadata_properties"
+    )
    op.drop_table("metadata_properties")
    # ### end Alembic commands ###
@@ -32,7 +32,9 @@
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column("datasets", sa.Column("last_activity_at", sa.DateTime(), nullable=True))
+    op.add_column(
+        "datasets", sa.Column("last_activity_at", sa.DateTime(), nullable=True)
+    )
    op.execute("UPDATE datasets SET last_activity_at = updated_at")
    with op.batch_alter_table("datasets") as batch_op:
        batch_op.alter_column("last_activity_at", nullable=False)
@@ -36,10 +36,18 @@ def upgrade() -> None:
        sa.Column("id", sa.Uuid, primary_key=True),
        sa.Column("fields", sa.JSON, nullable=False),
        sa.Column("external_id", sa.String, index=True),
-        sa.Column("dataset_id", sa.Uuid, sa.ForeignKey("datasets.id", ondelete="CASCADE"), nullable=False, index=True),
+        sa.Column(
+            "dataset_id",
+            sa.Uuid,
+            sa.ForeignKey("datasets.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
        sa.Column("inserted_at", sa.DateTime, nullable=False),
        sa.Column("updated_at", sa.DateTime, nullable=False),
-        sa.UniqueConstraint("external_id", "dataset_id", name="record_external_id_dataset_id_uq"),
+        sa.UniqueConstraint(
+            "external_id", "dataset_id", name="record_external_id_dataset_id_uq"
+        ),
    )
@@ -30,7 +30,9 @@
depends_on = None

# Aligned with the values of `ResponseStatus` in `src/argilla/server/models/dataset.py`
-response_status_enum = sa.Enum("draft", "submitted", "discarded", name="response_status_enum")
+response_status_enum = sa.Enum(
+    "draft", "submitted", "discarded", name="response_status_enum"
+)

# Aligned with the values of `DatasetStatus` in `src/argilla/server/models/dataset.py`
dataset_status_enum = sa.Enum("draft", "ready", name="dataset_status_enum")
@@ -54,21 +56,35 @@ def upgrade() -> None:
    )

    user_role_enum.create(bind)
-    op.execute("ALTER TABLE users ALTER COLUMN role TYPE user_role_enum USING role::user_role_enum")
+    op.execute(
+        "ALTER TABLE users ALTER COLUMN role TYPE user_role_enum USING role::user_role_enum"
+    )


def downgrade() -> None:
    bind = op.get_bind()

    if bind.dialect.name == "postgresql":
        with op.batch_alter_table("users") as batch_op:
-            batch_op.alter_column("role", existing_type=user_role_enum, type_=sa.String(), nullable=False)
+            batch_op.alter_column(
+                "role", existing_type=user_role_enum, type_=sa.String(), nullable=False
+            )
        user_role_enum.drop(bind)

        with op.batch_alter_table("datasets") as batch_op:
-            batch_op.alter_column("status", existing_type=dataset_status_enum, type_=sa.String(), nullable=False)
+            batch_op.alter_column(
+                "status",
+                existing_type=dataset_status_enum,
+                type_=sa.String(),
+                nullable=False,
+            )
        dataset_status_enum.drop(bind)

        with op.batch_alter_table("responses") as batch_op:
-            batch_op.alter_column("status", existing_type=response_status_enum, type_=sa.String(), nullable=False)
+            batch_op.alter_column(
+                "status",
+                existing_type=response_status_enum,
+                type_=sa.String(),
+                nullable=False,
+            )
        response_status_enum.drop(bind)
@@ -37,9 +37,17 @@ def upgrade() -> None:
        sa.Column("id", sa.Uuid, primary_key=True),
        sa.Column("name", sa.String, nullable=False, index=True),
        sa.Column("title", sa.Text, nullable=False),
-        sa.Column("required", sa.Boolean, nullable=False, server_default=expression.false()),
+        sa.Column(
+            "required", sa.Boolean, nullable=False, server_default=expression.false()
+        ),
        sa.Column("settings", sa.JSON, nullable=False),
-        sa.Column("dataset_id", sa.Uuid, sa.ForeignKey("datasets.id", ondelete="CASCADE"), nullable=False, index=True),
+        sa.Column(
+            "dataset_id",
+            sa.Uuid,
+            sa.ForeignKey("datasets.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
        sa.Column("inserted_at", sa.DateTime, nullable=False),
        sa.Column("updated_at", sa.DateTime, nullable=False),
        sa.UniqueConstraint("name", "dataset_id", name="field_name_dataset_id_uq"),
@@ -32,7 +32,13 @@
def upgrade() -> None:
    op.add_column(
-        "datasets", sa.Column("allow_extra_metadata", sa.Boolean(), server_default=sa.text("true"), nullable=False)
+        "datasets",
+        sa.Column(
+            "allow_extra_metadata",
+            sa.Boolean(),
+            server_default=sa.text("true"),
+            nullable=False,
+        ),
    )
@@ -38,11 +38,17 @@ def upgrade() -> None:
        sa.Column("guidelines", sa.Text),
        sa.Column("status", sa.String, nullable=False, index=True),
        sa.Column(
-            "workspace_id", sa.Uuid, sa.ForeignKey("workspaces.id", ondelete="CASCADE"), nullable=False, index=True
+            "workspace_id",
+            sa.Uuid,
+            sa.ForeignKey("workspaces.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
        ),
        sa.Column("inserted_at", sa.DateTime, nullable=False),
        sa.Column("updated_at", sa.DateTime, nullable=False),
-        sa.UniqueConstraint("name", "workspace_id", name="dataset_name_workspace_id_uq"),
+        sa.UniqueConstraint(
+            "name", "workspace_id", name="dataset_name_workspace_id_uq"
+        ),
    )
@@ -41,12 +41,25 @@ def upgrade() -> None:
        sa.Column("inserted_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(["record_id"], ["records.id"], ondelete="CASCADE"),
-        sa.ForeignKeyConstraint(["vector_settings_id"], ["vectors_settings.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(
+            ["vector_settings_id"], ["vectors_settings.id"], ondelete="CASCADE"
+        ),
        sa.PrimaryKeyConstraint("id"),
-        sa.UniqueConstraint("record_id", "vector_settings_id", name="vector_record_id_vector_settings_id_uq"),
+        sa.UniqueConstraint(
+            "record_id",
+            "vector_settings_id",
+            name="vector_record_id_vector_settings_id_uq",
+        ),
    )
-    op.create_index(op.f("ix_vectors_record_id"), "vectors", ["record_id"], unique=False)
-    op.create_index(op.f("ix_vectors_vector_settings_id"), "vectors", ["vector_settings_id"], unique=False)
+    op.create_index(
+        op.f("ix_vectors_record_id"), "vectors", ["record_id"], unique=False
+    )
+    op.create_index(
+        op.f("ix_vectors_vector_settings_id"),
+        "vectors",
+        ["vector_settings_id"],
+        unique=False,
+    )
    # ### end Alembic commands ###
@@ -32,7 +32,9 @@
def upgrade() -> None:
    with op.batch_alter_table("suggestions") as batch_op:
-        batch_op.alter_column("score", type_=sa.JSON(), postgresql_using="to_json(score)")
+        batch_op.alter_column(
+            "score", type_=sa.JSON(), postgresql_using="to_json(score)"
+        )

    op.execute(_score_update_statement())
@@ -50,7 +52,9 @@ def _score_update_statement() -> str:
    elif op.get_context().dialect.name == "postgresql":
        return "UPDATE suggestions SET score = NULL WHERE json_typeof(value) = 'array'"
    else:
-        raise NotImplementedError(f"Unsupported database: {op.get_context().dialect.name}")
+        raise NotImplementedError(
+            f"Unsupported database: {op.get_context().dialect.name}"
+        )


def _score_float_update_statement() -> str:
@@ -73,4 +77,6 @@ def _score_float_update_statement() -> str:
            END
        """
    else:
-        raise NotImplementedError(f"Unsupported database: {op.get_context().dialect.name}")
+        raise NotImplementedError(
+            f"Unsupported database: {op.get_context().dialect.name}"
+        )
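Taken together, the commit is the two config lines plus a formatter run over the tree. A hedged sketch of reproducing it, assuming black and ruff are installed and the working directory is argilla-server/ (the exact commands the author ran are not recorded in the diff):

# Hedged sketch: re-running the formatters after the pyproject.toml change.
import subprocess

subprocess.run(["black", "."], check=True)  # rewraps code to the 88-character limit
subprocess.run(["ruff", "check", ".", "--fix"], check=True)  # re-lints at the new limit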