Commit 7fd401b

Merge commit '9c792e4754c16f539e9bd519f6a57e2dcd957454' into feat/species-pages/observers

edelclaux committed Feb 3, 2025
2 parents: 1b1d22f + 9c792e4
Showing 38 changed files with 543 additions and 406 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/cypress.yml
@@ -111,7 +111,7 @@ jobs:
cp ./config/settings.ini.sample ./config/settings.ini
./install/05_install_frontend.sh --ci
env:
GEONATURE_CONFIG_FILE: '${{ github.workspace }}/config/test_config.toml'
GEONATURE_CONFIG_FILE: "${{ github.workspace }}/config/test_config.toml"
- name: Install core modules
run: |
geonature install-gn-module contrib/occtax OCCTAX --build=false
5 changes: 3 additions & 2 deletions .github/workflows/sphinx.yml
@@ -1,8 +1,9 @@
name: "Build Sphinx Documentation"

on:
release:
types: [published]
push:
branches:
- master

jobs:
docs:
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
2.15.1
2.15.2
3 changes: 2 additions & 1 deletion backend/geonature/core/gn_meta/models/datasets.py
@@ -19,7 +19,7 @@
from geonature.core.gn_permissions.tools import get_scopes_by_action
from geonature.core.gn_commons.models import cor_field_dataset, cor_module_dataset

from ref_geo.models import LAreas

from .commons import *


@@ -345,6 +345,7 @@ def filter_by_creatable(cls, module_code, *, query, user=None, object_code=None)

@qfilter(query=True)
def filter_by_areas(cls, areas, *, query):
from ref_geo.models import LAreas
from geonature.core.gn_synthese.models import Synthese

areaFilter = []
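Moving the LAreas import from module level (removed in the first hunk) into filter_by_areas is the usual deferred-import fix for a circular dependency between modules. A minimal sketch of the pattern, with hypothetical modules a.py and b.py for illustration only:

# a.py -- hypothetical module
def make_b():
    # Imported at call time rather than at module-import time, so a.py and
    # b.py can import each other without raising a circular-import error.
    from b import B

    return B()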
8 changes: 7 additions & 1 deletion backend/geonature/core/gn_synthese/models.py
@@ -434,7 +434,13 @@ class Synthese(DB.Model):
meta_update_date = DB.Column(DB.DateTime, server_default=FetchedValue())
last_action = DB.Column(DB.Unicode)

areas = relationship(LAreas, secondary=corAreaSynthese, backref="synthese_obs")
areas = relationship(
LAreas,
secondary=corAreaSynthese,
primaryjoin=(corAreaSynthese.c.id_synthese == id_synthese),
secondaryjoin=(corAreaSynthese.c.id_area == LAreas.id_area),
backref="synthese_obs",
)
area_attachment = relationship(LAreas, foreign_keys=[id_area_attachment])
validations = relationship(TValidations, backref="attached_row")
last_validation = relationship(last_validation, uselist=False, viewonly=True)
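Because Synthese reaches LAreas by two routes (the id_area_attachment foreign key and the corAreaSynthese association table), spelling out primaryjoin/secondaryjoin removes any ambiguity about which join path the many-to-many relationship uses. A reduced, self-contained sketch of the same pattern, with hypothetical obs/area tables:

from sqlalchemy import Column, ForeignKey, Integer, Table
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

# Hypothetical association table linking observations to areas.
obs_area = Table(
    "obs_area",
    Base.metadata,
    Column("id_obs", Integer, ForeignKey("obs.id_obs"), primary_key=True),
    Column("id_area", Integer, ForeignKey("area.id_area"), primary_key=True),
)


class Area(Base):
    __tablename__ = "area"
    id_area = Column(Integer, primary_key=True)


class Obs(Base):
    __tablename__ = "obs"
    id_obs = Column(Integer, primary_key=True)
    # Explicit conditions: parent -> association, then association -> target.
    areas = relationship(
        Area,
        secondary=obs_area,
        primaryjoin=id_obs == obs_area.c.id_obs,
        secondaryjoin=obs_area.c.id_area == Area.id_area,
        backref="observations",
    )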
4 changes: 2 additions & 2 deletions backend/geonature/core/imports/checks/errors.py
@@ -61,11 +61,11 @@ class ImportCodeError:
MISSING_GEOM : str
The geometry is missing
GEOMETRY_OUTSIDE : str
The geometry is outside the polygon in the GeoNature configuration (`INSTANCE_BOUNDING_BOX`)
The geometry is outside the polygon defined by ID_AREA_RESTRICTION in the configuration
NO_GEOM : str
No geometry given (neither WKT nor latitude/longitude)
GEOMETRY_OUT_OF_BOX : str
The geometry is outside the perimeter of the instance geography # FIXME: clarify (confusion with GEOMETRY_OUTSIDE)
The geometry is outside of a bounding box
ERRONEOUS_PARENT_ENTITY : str
The parent entity is not valid
NO_PARENT_ENTITY : str
2 changes: 1 addition & 1 deletion backend/geonature/core/imports/checks/sql/extra.py
@@ -640,7 +640,7 @@ def check_entity_data_consistency(imprt, entity, fields, grouping_field):
select(hashedRows.c.grouping_col.label("grouping_col"))
.group_by(hashedRows.c.grouping_col)
.having(func.count(func.distinct(hashedRows.c.hashed)) > 1)
)
).cte()

# note: rows are unidentified (None) instead of being marked as invalid (False) in order to avoid running checks
report_erroneous_rows(
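Appending .cte() turns the grouped select into a named common-table expression, so the report_erroneous_rows query that follows can select from it like a table. A minimal standalone sketch of the same SQLAlchemy pattern, with a hypothetical rows table:

from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
rows = Table(
    "rows",
    metadata,
    Column("grouping_col", Integer),
    Column("hashed", String),
)

# Groups whose members do not all hash to the same value.
inconsistent = (
    select(rows.c.grouping_col)
    .group_by(rows.c.grouping_col)
    .having(func.count(func.distinct(rows.c.hashed)) > 1)
).cte()

# The CTE can now be joined or selected from like any table.
stmt = select(func.count()).select_from(inconsistent)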
17 changes: 1 addition & 16 deletions backend/geonature/core/imports/config_schema.py
@@ -69,13 +69,6 @@
# If VALUE MAPPING is not allowed, you must specify the DEFAULT_VALUE_MAPPING_ID
DEFAULT_VALUE_MAPPING_ID = 3

INSTANCE_BOUNDING_BOX = [-5.0, 41, 10, 51.15]

ALLOW_FIELD_MAPPING = True
DEFAULT_FIELD_MAPPING_ID = 1
# Parameter to define if the checkbox allowing to change display mode is displayed or not.
DISPLAY_CHECK_BOX_MAPPED_FIELD = True

# Parameter to define the rank shown in the doughnut chart in the import report
# must be in ['regne', 'phylum', 'classe', 'ordre', 'famille', 'sous_famille', 'tribu', 'group1_inpn', 'group2_inpn']
DEFAULT_RANK = "regne"
@@ -96,20 +89,12 @@ class ImportConfigSchema(Schema):
) # FIXME: unused
FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE = fields.Boolean(load_default=True)
DISPLAY_MAPPED_VALUES = fields.Boolean(load_default=True) # FIXME: unused
INSTANCE_BOUNDING_BOX = fields.List(
fields.Float, load_default=INSTANCE_BOUNDING_BOX
) # FIXME: unused
ENABLE_BOUNDING_BOX_CHECK = fields.Boolean(load_default=True) # FIXME : unused
# When setting PER_DATASET_UUID_CHECK=True (used for import in synthese):
# - Replace the unicity constraint on unique_id_sinp with a unicity constraint on (unique_id_sinp, id_dataset).
# - Disable per-row dataset import by setting display=False in gn_imports.bib_fields
# for the id_dataset field belonging to synthese destination.
PER_DATASET_UUID_CHECK = fields.Boolean(load_default=False)
ALLOW_FIELD_MAPPING = fields.Boolean(load_default=ALLOW_FIELD_MAPPING) # FIXME: unused
DEFAULT_FIELD_MAPPING_ID = fields.Integer(
load_default=DEFAULT_FIELD_MAPPING_ID
) # FIXME: unused
DISPLAY_CHECK_BOX_MAPPED_FIELD = fields.Boolean(load_default=True)

CHECK_PRIVATE_JDD_BLURING = fields.Boolean(load_default=True)
CHECK_REF_BIBLIO_LITTERATURE = fields.Boolean(load_default=True)
CHECK_EXIST_PROOF = fields.Boolean(load_default=True)
60 changes: 34 additions & 26 deletions backend/geonature/core/imports/routes/imports.py
Expand Up @@ -476,7 +476,7 @@ def preview_valid_data(scope, imprt):

# Retrieve data for each entity from entries in the transient table which are related to the import
transient_table = imprt.destination.get_transient_table()
entities = db.session.scalars(
entities: list[Entity] = db.session.scalars(
select(Entity).filter_by(destination=imprt.destination).order_by(Entity.order)
).all()

@@ -493,46 +493,54 @@
.all()
)
columns = [{"prop": field.dest_column, "name": field.name_field} for field in fields]
columns_to_count_unique_entities = [
transient_table.c[field.dest_column] for field in fields
]

valid_data = db.session.execute(
select(*[transient_table.c[field.dest_field] for field in fields])
.distinct()
.where(
transient_table.c.id_import == imprt.id_import,
transient_table.c[entity.validity_column] == True,
)
.limit(100)
).all()
id_field = (
entity.unique_column.dest_field if entity.unique_column.dest_field in fields else None
)
data_fields_query = [transient_table.c[field.dest_field] for field in fields]

n_valid_data = db.session.execute(
select(func.count(func.distinct(*columns_to_count_unique_entities)))
.select_from(transient_table)
.where(
transient_table.c.id_import == imprt.id_import,
transient_table.c[entity.validity_column] == True,
)
).scalar()
query = select(*data_fields_query).where(
transient_table.c.id_import == imprt.id_import,
transient_table.c[entity.validity_column] == True,
)
valid_data = db.session.execute(query.limit(100)).all()

n_invalid_data = db.session.execute(
select(func.count(func.distinct(*columns_to_count_unique_entities)))
.select_from(transient_table)
def count_select(query_cte):
count_ = "*"
# if there are multiple entities and this entity has a unique column, base the count on that column

if entity.unique_column and len(entities) > 1 and id_field:
count_ = func.distinct(query_cte.c[id_field])
return count_

valid_data_cte = query.cte()
n_valid_data = db.session.scalar(
select(func.count(count_select(valid_data_cte))).select_from(valid_data_cte)
)

invalid_data_cte = (
select(data_fields_query)
.where(
transient_table.c.id_import == imprt.id_import,
transient_table.c[entity.validity_column] == False,
)
).scalar()
.cte()
)

n_invalid_data = db.session.scalar(
select(func.count(count_select(invalid_data_cte))).select_from(invalid_data_cte)
)

data["entities"].append(
{
"entity": entity.as_dict(),
"columns": columns,
"valid_data": valid_data,
"n_valid_data": n_valid_data,
"n_invalid_data": n_invalid_data,
"n_invalid_data": n_invalid_data, # NOTE: Not used in the frontend ...
}
)

return jsonify(data)


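The refactor materializes the filtered rows once as a CTE, then counts either all rows or, when several entities share the transient table and the entity has a unique column, distinct values of that column. A condensed sketch of the counting logic (hypothetical helper; the names are illustrative, not the module's API):

from sqlalchemy import func, select


def count_rows(session, transient_table, id_import, validity_column, unique_col=None):
    """Count an entity's rows; count distinct unique-column values when one is given."""
    cte = (
        select(transient_table)
        .where(
            transient_table.c.id_import == id_import,
            transient_table.c[validity_column] == True,  # noqa: E712 -- SQL comparison
        )
        .cte()
    )
    if unique_col is not None:
        stmt = select(func.count(func.distinct(cte.c[unique_col])))
    else:
        stmt = select(func.count())  # COUNT(*)
    return session.scalar(stmt.select_from(cte))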
24 changes: 24 additions & 0 deletions backend/geonature/tests/imports/test_imports_occhab.py
@@ -633,3 +633,27 @@ def test_bbox_computation_transient(
]
],
}

@pytest.mark.parametrize("import_file_name", ["valid_file.csv"])
def test_preview_data(self, client, prepared_import):
valid_numbers = {
"station_valid": 7,
"station_invalid": 8,
"habitat_valid": 11,
"habitat_invalid": 23,
}
imprt = prepared_import
with logged_user(client, imprt.authors[0]):
response = client.get(url_for("import.preview_valid_data", import_id=imprt.id_import))
assert response.status_code == 200
data = response.json

index_data_station = 0 if data["entities"][0]["entity"]["code"] == "station" else 1
data_station = data["entities"][index_data_station]
data_habitat = data["entities"][0 if index_data_station == 1 else 1]

assert data_station["n_valid_data"] == valid_numbers["station_valid"]
assert data_station["n_invalid_data"] == valid_numbers["station_invalid"]

assert data_habitat["n_valid_data"] == valid_numbers["habitat_valid"]
assert data_habitat["n_invalid_data"] == valid_numbers["habitat_invalid"]
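Since the order of data["entities"] is not fixed, the test resolves the station's index first; an equivalent, slightly more direct formulation would key the entities by code, e.g.:

entities_by_code = {e["entity"]["code"]: e for e in data["entities"]}
assert entities_by_code["station"]["n_valid_data"] == valid_numbers["station_valid"]
assert entities_by_code["habitat"]["n_invalid_data"] == valid_numbers["habitat_invalid"]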
17 changes: 16 additions & 1 deletion backend/geonature/utils/config_schema.py
@@ -177,6 +177,16 @@ def validate_provider(self, data, **kwargs):
ProviderConfigurationSchema().load(provider, unknown=INCLUDE)


class AuthenticationFrontendConfig(AuthenticationConfig):

@post_load
def post_load(self, data, **kwargs):
data["PROVIDERS"] = [
{"id_provider": provider["id_provider"]} for provider in data["PROVIDERS"]
]
return data


class GnPySchemaConf(Schema):
SQLALCHEMY_DATABASE_URI = fields.String(
required=True,
@@ -208,6 +218,9 @@ class GnPySchemaConf(Schema):
SERVER = fields.Nested(ServerConfig, load_default=ServerConfig().load({}))
MEDIAS = fields.Nested(MediasConfig, load_default=MediasConfig().load({}))
ALEMBIC = fields.Nested(AlembicConfig, load_default=AlembicConfig().load({}))
AUTHENTICATION = fields.Nested(
AuthenticationConfig, load_default=AuthenticationConfig().load({}), unknown=INCLUDE
)

@post_load()
def folders(self, data, **kwargs):
@@ -582,7 +595,9 @@ class GnGeneralSchemaConf(Schema):
PROFILES_REFRESH_CRONTAB = fields.String(load_default="0 3 * * *")
MEDIA_CLEAN_CRONTAB = fields.String(load_default="0 1 * * *")
AUTHENTICATION = fields.Nested(
AuthenticationConfig, load_default=AuthenticationConfig().load({}), unknown=INCLUDE
AuthenticationFrontendConfig,
load_default=AuthenticationFrontendConfig().load({}),
unknown=INCLUDE,
)

@validates_schema
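The post_load hook on AuthenticationFrontendConfig means the configuration serialized for the frontend keeps only each provider's id_provider, so module paths and any credentials stay server-side. A sketch of the intended behaviour (illustrative input; the exact set of required provider fields may differ):

from marshmallow import INCLUDE

from geonature.utils.config_schema import AuthenticationFrontendConfig

raw = {
    "PROVIDERS": [
        {
            "id_provider": "google",
            "module": "pypnusershub.auth.providers.openid_provider.OpenIDProvider",
        }
    ]
}
conf = AuthenticationFrontendConfig().load(raw, unknown=INCLUDE)
# Everything except the identifier is stripped from each provider.
assert conf["PROVIDERS"] == [{"id_provider": "google"}]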
25 changes: 2 additions & 23 deletions config/default_config.toml.example
@@ -611,11 +611,11 @@ MEDIA_CLEAN_CRONTAB = "0 1 * * *"
[AUTHENTICATION]
DEFAULT_RECONCILIATION_GROUP_ID = 2
[[AUTHENTICATION.PROVIDERS]]
module="pypnusershub.auth.providers.default.DefaultConfiguration"
module="pypnusershub.auth.providers.default.LocalProvider"
id_provider="local_provider"

[[AUTHENTICATION.PROVIDERS]]
module="pypnusershub.auth.providers.openid_provider.OpenIDConnectProvider"
module="pypnusershub.auth.providers.openid_provider.OpenIDProvider"
id_provider = "google"
logo = "<i class='fa fa-google' aria-hidden='true'></i>"
label = "Google"
@@ -627,21 +627,6 @@
# Allowed encodings for imported files
ENCODAGE = ["UTF-8"]

# Bounding box of the instance's data.
# Used to raise warnings when data fall outside of it.
# Format: [XMIN, YMIN, XMAX, YMAX]
# Default: metropolitan France, including Corsica
INSTANCE_BOUNDING_BOX = [-5.0, 41.0, 10.0, 51.15]

# Enable checking that imported data fall within the bounding
# box defined in INSTANCE_BOUNDING_BOX
ENABLE_BOUNDING_BOX_CHECK = true

# Enable checking the UUIDs of imported data against those already present in the Synthese
# Warning: it is advisable to disable this option when the Synthese data
# are very large.
ENABLE_SYNTHESE_UUID_CHECK = true

# Maximum size of the uploaded file (in MB)
MAX_FILE_SIZE=500

Expand All @@ -663,12 +648,6 @@ MEDIA_CLEAN_CRONTAB = "0 1 * * *"
# Raises a warning rather than an error on the affected rows
FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE = false

# Accepted encodings
ENCODAGE = ["UTF-8"]

# Show the button to show/hide the fields already mapped
DISPLAY_CHECK_BOX_MAPPED_FIELD = true

# Enables checking that the "floutage" (blurring) field exists when the dataset (JDD) is private
CHECK_PRIVATE_JDD_BLURING = true

76 additions & 0 deletions: new Alembic migration file (revision 9c3e1f98361f)
@@ -0,0 +1,76 @@
"""fix_typo
Revision ID: 9c3e1f98361f
Revises: c1a6b0793360
Create Date: 2025-01-20 16:09:12.490217
"""

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "9c3e1f98361f"
down_revision = "c1a6b0793360"
branch_labels = None
depends_on = None

OLD_NAME_MAPPING = "Occhab"
NEW_NAME_MAPPING = "Occhab GeoNature"


def get_table():
conn = op.get_bind()
metadata = sa.MetaData(bind=conn)
bib_fields = sa.Table("bib_fields", metadata, schema="gn_imports", autoload_with=op.get_bind())
destinations = sa.Table(
"bib_destinations", metadata, schema="gn_imports", autoload_with=op.get_bind()
)
t_mappings = sa.Table("t_mappings", metadata, schema="gn_imports", autoload_with=op.get_bind())
return bib_fields, destinations, t_mappings


def get_id_dest_occhab():
_, destinations, _ = get_table()
id_destination_occhab = (
op.get_bind()
.execute(sa.select(destinations.c.id_destination).where(destinations.c.code == "occhab"))
.scalar()
)
return id_destination_occhab


def upgrade():
bib_fields, destinations, t_mappings = get_table()
op.execute(
sa.update(bib_fields)
.where(
bib_fields.c.name_field == "depth_max",
bib_fields.c.id_destination == get_id_dest_occhab(),
)
.values(dest_field="depth_max")
)

op.execute(
sa.update(t_mappings)
.where(t_mappings.c.label == OLD_NAME_MAPPING)
.values(label=NEW_NAME_MAPPING)
)


def downgrade():
bib_fields, _, t_mappings = get_table()
op.execute(
sa.update(bib_fields)
.where(
bib_fields.c.name_field == "depth_max",
bib_fields.c.id_destination == get_id_dest_occhab(),
)
.values(dest_field="depth_min")
)
op.execute(
sa.update(t_mappings)
.where(t_mappings.c.label == NEW_NAME_MAPPING)
.values(label=OLD_NAME_MAPPING)
)
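One caveat: sa.MetaData(bind=conn) relies on the pre-2.0 SQLAlchemy API (the bind parameter was removed in SQLAlchemy 2.0). Under 2.0, the same reflection would rely on autoload_with alone, as in this sketch:

metadata = sa.MetaData()
bib_fields = sa.Table(
    "bib_fields", metadata, schema="gn_imports", autoload_with=op.get_bind()
)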