diff --git a/VERSION b/VERSION index dedcc7d433..5d9ade10c6 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.9.1 +2.9.2 diff --git a/backend/geonature/app.py b/backend/geonature/app.py index f8d8c6e07a..b4eae49f47 100755 --- a/backend/geonature/app.py +++ b/backend/geonature/app.py @@ -149,6 +149,7 @@ def load_current_user(): ('geonature.core.auth.routes:routes', '/gn_auth'), ('geonature.core.gn_monitoring.routes:routes', '/gn_monitoring'), ('geonature.core.gn_profiles.routes:routes', '/gn_profiles'), + ('geonature.core.sensitivity.routes:routes', None), ]: module_name, blueprint_name = blueprint_path.split(':') blueprint = getattr(import_module(module_name), blueprint_name) diff --git a/backend/geonature/core/auth/routes.py b/backend/geonature/core/auth/routes.py index 67c8f37888..7b3a48d8bf 100644 --- a/backend/geonature/core/auth/routes.py +++ b/backend/geonature/core/auth/routes.py @@ -3,8 +3,6 @@ """ import datetime -from pypnusershub.db.models import User -from pypnusershub.schemas import UserSchema import xmltodict import logging from copy import copy @@ -24,9 +22,11 @@ from itsdangerous import TimedJSONWebSignatureSerializer as Serializer from utils_flask_sqla.response import json_resp +from pypnusershub.db.models import User, Organisme from pypnusershub.routes import insert_or_update_organism, insert_or_update_role from geonature.utils import utilsrequests from geonature.utils.errors import CasAuthentificationError +from geonature.utils.env import db routes = Blueprint("gn_auth", __name__, template_folder="templates") @@ -69,10 +69,9 @@ def loginCas(): raise CasAuthentificationError( "Error with the inpn authentification service", status_code=500 ) - info_user = response.json() - organism_id = info_user["codeOrganisme"] user = insert_user_and_org(info_user) + db.session.commit() # creation de la Response response = make_response(redirect(current_app.config["URL_APPLICATION"])) @@ -85,10 +84,18 @@ def loginCas(): response.set_cookie("token", token, expires=cookie_exp) # User cookie + organism_id = info_user["codeOrganisme"] + if not organism_id: + organism_id = ( + Organisme.query + .filter_by(nom_organisme='Autre') + .one() + .id_organisme + ) current_user = { "user_login": user["identifiant"], "id_role": user["id_role"], - "id_organisme": organism_id if organism_id else -1, + "id_organisme": organism_id, } response.set_cookie("current_user", str(current_user), expires=cookie_exp) return response @@ -153,16 +160,7 @@ def insert_user_and_org(info_user): if organism_id: organism = {"id_organisme": organism_id, "nom_organisme": organism_name} insert_or_update_organism(organism) - if not current_app.config["CAS"]["USERS_CAN_SEE_ORGANISM_DATA"] or organism_id is None: - # group socle 1 - group_id = current_app.config["BDD"]["ID_USER_SOCLE_1"] - else: - # group socle 2 - group_id = current_app.config["BDD"]["ID_USER_SOCLE_2"] - - group = User.query.get(group_id) - group_as_dict = UserSchema(exclude=["nom_complet"]).dump(group) - user = { + user_info = { "id_role": user_id, "identifiant": user_login, "nom_role": info_user["nom"], @@ -170,14 +168,16 @@ def insert_user_and_org(info_user): "id_organisme": organism_id, "email": info_user["email"], "active": True, - "groups": [group_as_dict] } - try: - insert_or_update_role(user) - except Exception as e: - log.info(e) - log.error(e) - raise CasAuthentificationError( - "Error insering user in GeoNature", status_code=500 - ) - return user + user_info = insert_or_update_role(user_info) + user = User.query.get(user_id) + if not user.groups: + if 
not current_app.config["CAS"]["USERS_CAN_SEE_ORGANISM_DATA"] or organism_id is None: + # group socle 1 + group_id = current_app.config["BDD"]["ID_USER_SOCLE_1"] + else: + # group socle 2 + group_id = current_app.config["BDD"]["ID_USER_SOCLE_2"] + group = User.query.get(group_id) + user.groups.append(group) + return user_info diff --git a/backend/geonature/core/sensitivity/routes.py b/backend/geonature/core/sensitivity/routes.py new file mode 100644 index 0000000000..1771e63e59 --- /dev/null +++ b/backend/geonature/core/sensitivity/routes.py @@ -0,0 +1,14 @@ +import click +from flask import Blueprint, current_app + +from geonature.utils.env import db + + +routes = Blueprint("sensitivity", __name__) + + +@routes.cli.command() +def update_synthese(): + count = db.session.execute("SELECT gn_synthese.update_sensitivity()").scalar() + db.session.commit() + click.echo(f"Sensitivity updated for {count} rows") diff --git a/backend/geonature/core/users/routes.py b/backend/geonature/core/users/routes.py index 99bc458c56..fbb12a46c6 100644 --- a/backend/geonature/core/users/routes.py +++ b/backend/geonature/core/users/routes.py @@ -35,6 +35,22 @@ s = requests.Session() +user_fields = { + 'id_role', + 'identifiant', + 'nom_role', + 'prenom_role', + 'nom_complet', + 'id_organisme', + 'groupe', + 'active', +} +organism_fields = { + 'id_organisme', + 'uuid_organisme', + 'nom_organisme', +} + # configuration of post_request actions for registrations REGISTER_POST_ACTION_FCT.update({ "create_temp_user": validate_temp_user, @@ -103,6 +119,7 @@ def getListes(): @routes.route("/role/", methods=["GET"]) +@permissions.login_required @json_resp def get_role(id_role): """ @@ -113,12 +130,13 @@ def get_role(id_role): :param id_role: the id user :type id_role: int """ - user = DB.session.query(User).filter_by(id_role=id_role).one() - return user.as_dict() + user = User.query.get_or_404(id_role) + return user.as_dict(fields=user_fields) @routes.route("/roles", methods=["GET"]) +@permissions.login_required @json_resp def get_roles(): """ @@ -127,7 +145,7 @@ def get_roles(): .. :quickref: User; """ params = request.args.to_dict() - q = DB.session.query(User) + q = User.query if "group" in params: q = q.filter(User.groupe == params["group"]) if "orderby" in params: @@ -136,10 +154,11 @@ def get_roles(): q = q.order_by(order_col) except AttributeError: log.error("the attribute to order on does not exist") - return [user.as_dict() for user in q.all()] + return [user.as_dict(fields=user_fields) for user in q.all()] @routes.route("/organisms", methods=["GET"]) +@permissions.login_required @json_resp def get_organismes(): """ @@ -148,20 +167,20 @@ def get_organismes(): .. 
:quickref: User; """ params = request.args.to_dict() - q = DB.session.query(BibOrganismes) + q = BibOrganismes.query if "orderby" in params: try: order_col = getattr(BibOrganismes.__table__.columns, params.pop("orderby")) q = q.order_by(order_col) except AttributeError: log.error("the attribute to order on does not exist") - return [organism.as_dict() for organism in q.all()] + return [organism.as_dict(fields=organism_fields) for organism in q.all()] @routes.route("/organisms_dataset_actor", methods=["GET"]) -@permissions.check_cruved_scope("R", True) +@permissions.login_required @json_resp -def get_organismes_jdd(info_role): +def get_organismes_jdd(): """ Get all organisms and the JDD where there are actor and where the current user hase autorization with its cruved @@ -183,7 +202,7 @@ def get_organismes_jdd(info_role): q = q.order_by(order_col) except AttributeError: log.error("the attribute to order on does not exist") - return [organism.as_dict() for organism in q.all()] + return [organism.as_dict(fields=organism_fields) for organism in q.all()] ######################### diff --git a/backend/geonature/migrations/versions/61e46813d621_update_synthese_sensitivity.py b/backend/geonature/migrations/versions/61e46813d621_update_synthese_sensitivity.py new file mode 100644 index 0000000000..633f629cf9 --- /dev/null +++ b/backend/geonature/migrations/versions/61e46813d621_update_synthese_sensitivity.py @@ -0,0 +1,69 @@ +"""Update synthese sensitivity + +Revision ID: 61e46813d621 +Revises: dde31e76ce45 +Create Date: 2022-02-15 15:23:08.732729 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '61e46813d621' +down_revision = 'dde31e76ce45' +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute(""" + CREATE OR REPLACE FUNCTION gn_synthese.update_sensitivity() + RETURNS int4 + LANGUAGE plpgsql + AS $function$ + DECLARE + affected_rows_count int; + BEGIN + WITH cte AS ( + SELECT + id_synthese, + id_nomenclature_sensitivity AS old_sensitivity, + gn_sensitivity.get_id_nomenclature_sensitivity( + date_min::date, + taxonomie.find_cdref(cd_nom), + the_geom_local, + jsonb_build_object( + 'STATUT_BIO', id_nomenclature_bio_status, + 'OCC_COMPORTEMENT', id_nomenclature_behaviour + ) + ) AS new_sensitivity + FROM + gn_synthese.synthese + WHERE + id_nomenclature_sensitivity != ref_nomenclatures.get_id_nomenclature('SENSIBILITE', '0') -- non sensible + OR + taxonomie.find_cdref(cd_nom) IN (SELECT DISTINCT cd_ref FROM gn_sensitivity.t_sensitivity_rules_cd_ref) + ) + UPDATE + gn_synthese.synthese s + SET + id_nomenclature_sensitivity = new_sensitivity + FROM + cte + WHERE + s.id_synthese = cte.id_synthese + AND + old_sensitivity != new_sensitivity; + GET DIAGNOSTICS affected_rows_count = ROW_COUNT; + RETURN affected_rows_count; + END; + $function$ + ; + """) + + +def downgrade(): + op.execute(""" + DROP FUNCTION gn_synthese.update_sensitivity; + """) diff --git a/backend/geonature/migrations/versions/ac08dcf3f27b_diffusion_level.py b/backend/geonature/migrations/versions/ac08dcf3f27b_diffusion_level.py new file mode 100644 index 0000000000..e6fbe7b21a --- /dev/null +++ b/backend/geonature/migrations/versions/ac08dcf3f27b_diffusion_level.py @@ -0,0 +1,308 @@ +"""Do not auto-compute diffusion_level + +Revision ID: ac08dcf3f27b +Revises: dfec5f64ac73 +Create Date: 2022-02-10 12:45:05.472204 + +""" +from distutils.util import strtobool + +from alembic import op, context +import sqlalchemy as sa + +from 
utils_flask_sqla.migrations.utils import logger + + +# revision identifiers, used by Alembic. +revision = 'ac08dcf3f27b' +down_revision = 'dfec5f64ac73' +branch_labels = None +depends_on = None + + +""" +- Lors de l’insertion de données dans la synthèse, seule la sensibilité est calculé, + le niveau de diffusion est maintenant intouché. +- Le calcul de la sensibilité prend en compte le critère OCC_COMPORTEMENT en plus du + critère STATUT_BIO existant. +- Le trigger d’update de la synthèse est passé de AFTER à BEFORE, évitant d’effectuer + un deuxième UPDATE pour mettre à jour la sensibilité. +- Met NULL dans synthese.id_nomenclature_diffusion_level quand le niveau de diffusion + actuel correspond au niveau de sensibilité (laissé tel quel s’il ne correspond pas). +""" + + +def upgrade(): + clear_diffusion_level = context.get_x_argument(as_dictionary=True).get('clear-diffusion-level') + if clear_diffusion_level is not None: + clear_diffusion_level = bool(strtobool(clear_diffusion_level)) + else: + clear_diffusion_level = True + + op.execute(""" + DROP TRIGGER tri_insert_calculate_sensitivity ON gn_synthese.synthese + """) + op.execute(""" + DROP TRIGGER tri_update_calculate_sensitivity ON gn_synthese.synthese + """) + op.execute(""" + DROP FUNCTION gn_synthese.fct_tri_cal_sensi_diff_level_on_each_statement + """) + op.execute(""" + DROP FUNCTION gn_synthese.fct_tri_cal_sensi_diff_level_on_each_row + """) + op.execute(""" + CREATE FUNCTION gn_synthese.fct_tri_calculate_sensitivity_on_each_statement() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- Calculate sensitivity on insert in synthese + BEGIN + WITH cte AS ( + SELECT + id_synthese, + gn_sensitivity.get_id_nomenclature_sensitivity( + new_row.date_min::date, + taxonomie.find_cdref(new_row.cd_nom), + new_row.the_geom_local, + jsonb_build_object( + 'STATUT_BIO', new_row.id_nomenclature_bio_status, + 'OCC_COMPORTEMENT', new_row.id_nomenclature_behaviour + ) + ) AS id_nomenclature_sensitivity + FROM + NEW AS new_row + ) + UPDATE + gn_synthese.synthese AS s + SET + id_nomenclature_sensitivity = c.id_nomenclature_sensitivity + FROM + cte AS c + WHERE + c.id_synthese = s.id_synthese + ; + RETURN NULL; + END; + $function$ + ; + """) + op.execute(""" + CREATE FUNCTION gn_synthese.fct_tri_update_sensitivity_on_each_row() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- Calculate sensitivity on update in synthese + BEGIN + NEW.id_nomenclature_sensitivity = gn_sensitivity.get_id_nomenclature_sensitivity( + NEW.date_min::date, + taxonomie.find_cdref(NEW.cd_nom), + NEW.the_geom_local, + jsonb_build_object( + 'STATUT_BIO', NEW.id_nomenclature_bio_status, + 'OCC_COMPORTEMENT', NEW.id_nomenclature_behaviour + ) + ); + RETURN NEW; + END; + $function$ + ; + """) + op.execute(""" + CREATE TRIGGER + tri_insert_calculate_sensitivity + AFTER + INSERT + ON + gn_synthese.synthese + REFERENCING + NEW TABLE AS NEW + FOR EACH + STATEMENT + EXECUTE PROCEDURE + gn_synthese.fct_tri_calculate_sensitivity_on_each_statement() + """) + op.execute(""" + CREATE TRIGGER + tri_update_calculate_sensitivity + BEFORE UPDATE OF + date_min, + date_max, + cd_nom, + the_geom_local, + id_nomenclature_bio_status, + id_nomenclature_behaviour + ON + gn_synthese.synthese + FOR EACH + ROW + EXECUTE PROCEDURE + gn_synthese.fct_tri_update_sensitivity_on_each_row() + """) + + if clear_diffusion_level: + logger.info("Clearing diffusion level…") + count = op.get_bind().execute(""" + WITH cleared_rows AS ( + UPDATE + gn_synthese.synthese s + SET + 
id_nomenclature_diffusion_level = NULL + FROM + ref_nomenclatures.t_nomenclatures nomenc_sensitivity, + ref_nomenclatures.t_nomenclatures nomenc_diff_level + WHERE + nomenc_sensitivity.id_nomenclature = s.id_nomenclature_sensitivity + AND nomenc_diff_level.id_nomenclature = s.id_nomenclature_diffusion_level + AND nomenc_diff_level.cd_nomenclature = gn_sensitivity.calculate_cd_diffusion_level(NULL, nomenc_sensitivity.cd_nomenclature) + RETURNING s.id_synthese + ) + SELECT + count(*) + FROM + cleared_rows; + """).scalar() + logger.info("Cleared diffusion level on {} rows.".format(count)) + + +def downgrade(): + restore_diffusion_level = context.get_x_argument(as_dictionary=True).get('restore-diffusion-level') + if restore_diffusion_level is not None: + restore_diffusion_level = bool(strtobool(restore_diffusion_level)) + else: + restore_diffusion_level = True + + if restore_diffusion_level: + logger.info("Restore diffusion level…") + count = op.get_bind().execute(""" + WITH restored_rows AS ( + UPDATE + gn_synthese.synthese s + SET + id_nomenclature_diffusion_level = ref_nomenclatures.get_id_nomenclature( + 'NIV_PRECIS', + gn_sensitivity.calculate_cd_diffusion_level( + NULL, + nomenc_sensitivity.cd_nomenclature + ) + ) + FROM + ref_nomenclatures.t_nomenclatures nomenc_sensitivity + WHERE + nomenc_sensitivity.id_nomenclature = s.id_nomenclature_sensitivity + AND s.id_nomenclature_diffusion_level IS NULL + RETURNING s.id_synthese + ) + SELECT + count(*) + FROM + restored_rows + """).scalar() + logger.info("Restored diffusion level on {} rows.".format(count)) + + op.execute(""" + DROP TRIGGER tri_insert_calculate_sensitivity ON gn_synthese.synthese + """) + op.execute(""" + DROP TRIGGER tri_update_calculate_sensitivity ON gn_synthese.synthese + """) + op.execute(""" + DROP FUNCTION gn_synthese.fct_tri_calculate_sensitivity_on_each_statement + """) + op.execute(""" + DROP FUNCTION gn_synthese.fct_tri_update_sensitivity_on_each_row + """) + op.execute(""" + CREATE FUNCTION gn_synthese.fct_tri_cal_sensi_diff_level_on_each_statement() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- Calculate sensitivity and diffusion level on insert in synthese + BEGIN + WITH cte AS ( + SELECT + gn_sensitivity.get_id_nomenclature_sensitivity( + updated_rows.date_min::date, + taxonomie.find_cdref(updated_rows.cd_nom), + updated_rows.the_geom_local, + ('{"STATUT_BIO": ' || updated_rows.id_nomenclature_bio_status::text || '}')::jsonb + ) AS id_nomenclature_sensitivity, + id_synthese, + t_diff.cd_nomenclature as cd_nomenclature_diffusion_level + FROM NEW AS updated_rows + LEFT JOIN ref_nomenclatures.t_nomenclatures t_diff ON t_diff.id_nomenclature = updated_rows.id_nomenclature_diffusion_level + WHERE updated_rows.id_nomenclature_sensitivity IS NULL + ) + UPDATE gn_synthese.synthese AS s + SET + id_nomenclature_sensitivity = c.id_nomenclature_sensitivity, + id_nomenclature_diffusion_level = ref_nomenclatures.get_id_nomenclature( + 'NIV_PRECIS', + gn_sensitivity.calculate_cd_diffusion_level( + c.cd_nomenclature_diffusion_level, + t_sensi.cd_nomenclature + ) + + ) + FROM cte AS c + LEFT JOIN ref_nomenclatures.t_nomenclatures t_sensi ON t_sensi.id_nomenclature = c.id_nomenclature_sensitivity + WHERE c.id_synthese = s.id_synthese + ; + RETURN NULL; + END; + $function$ + ; + """) + op.execute(""" + CREATE TRIGGER tri_insert_calculate_sensitivity AFTER + INSERT + ON + gn_synthese.synthese REFERENCING NEW TABLE AS NEW FOR EACH STATEMENT EXECUTE PROCEDURE gn_synthese.fct_tri_cal_sensi_diff_level_on_each_statement() + 
""") + op.execute(""" + CREATE FUNCTION gn_synthese.fct_tri_cal_sensi_diff_level_on_each_row() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- Calculate sensitivity and diffusion level on update in synthese + DECLARE calculated_id_sensi integer; + BEGIN + SELECT + gn_sensitivity.get_id_nomenclature_sensitivity( + NEW.date_min::date, + taxonomie.find_cdref(NEW.cd_nom), + NEW.the_geom_local, + ('{"STATUT_BIO": ' || NEW.id_nomenclature_bio_status::text || '}')::jsonb + ) INTO calculated_id_sensi; + UPDATE gn_synthese.synthese + SET + id_nomenclature_sensitivity = calculated_id_sensi, + -- On ne met pas à jour le niveau de diffusion s'il a déjà une valeur + id_nomenclature_diffusion_level = CASE WHEN OLD.id_nomenclature_diffusion_level IS NULL THEN ( + SELECT ref_nomenclatures.get_id_nomenclature( + 'NIV_PRECIS', + gn_sensitivity.calculate_cd_diffusion_level( + ref_nomenclatures.get_cd_nomenclature(OLD.id_nomenclature_diffusion_level), + ref_nomenclatures.get_cd_nomenclature(calculated_id_sensi) + ) + ) + ) + ELSE OLD.id_nomenclature_diffusion_level + END + WHERE id_synthese = OLD.id_synthese + ; + RETURN NULL; + END; + $function$ + ; + """) + op.execute(""" + CREATE TRIGGER tri_update_calculate_sensitivity AFTER + UPDATE + OF date_min, + date_max, + cd_nom, + the_geom_local, + id_nomenclature_bio_status ON + gn_synthese.synthese FOR EACH ROW EXECUTE PROCEDURE gn_synthese.fct_tri_cal_sensi_diff_level_on_each_row() + """) diff --git a/backend/geonature/migrations/versions/dfec5f64ac73_fix_sensitivity_algo.py b/backend/geonature/migrations/versions/dfec5f64ac73_fix_sensitivity_algo.py new file mode 100644 index 0000000000..6f17ed5ecf --- /dev/null +++ b/backend/geonature/migrations/versions/dfec5f64ac73_fix_sensitivity_algo.py @@ -0,0 +1,203 @@ +"""Fix sensitivity algorithm + +Revision ID: dfec5f64ac73 +Revises: dde31e76ce45 +Create Date: 2022-01-28 14:06:38.748133 + +""" +from distutils.util import strtobool + +from alembic import op, context +import sqlalchemy as sa + +from utils_flask_sqla.migrations.utils import logger + + +# revision identifiers, used by Alembic. 
+revision = 'dfec5f64ac73' +down_revision = '61e46813d621' +branch_labels = None +depends_on = None + + +def upgrade(): + recompute_sensitivity = context.get_x_argument(as_dictionary=True).get('recompute-sensitivity') + if recompute_sensitivity is not None: + recompute_sensitivity = bool(strtobool(recompute_sensitivity)) + else: + recompute_sensitivity = True + + op.execute(""" + CREATE OR REPLACE FUNCTION gn_sensitivity.get_id_nomenclature_sensitivity(my_date_obs date, my_cd_ref integer, my_geom geometry, my_criterias jsonb) + RETURNS integer + LANGUAGE plpgsql + IMMUTABLE + AS $function$ + DECLARE + sensitivity integer; + BEGIN + -- Paramètres durée, zone géographique, période de l'observation et critères biologique + SELECT INTO sensitivity r.id_nomenclature_sensitivity + FROM gn_sensitivity.t_sensitivity_rules_cd_ref r + JOIN ref_nomenclatures.t_nomenclatures n ON n.id_nomenclature = r.id_nomenclature_sensitivity + LEFT OUTER JOIN gn_sensitivity.cor_sensitivity_area USING(id_sensitivity) + LEFT OUTER JOIN ref_geo.l_areas a USING(id_area) + LEFT OUTER JOIN gn_sensitivity.cor_sensitivity_criteria c USING(id_sensitivity) + WHERE + ( -- taxon + my_cd_ref = r.cd_ref + ) AND ( -- zone géographique de validité + a.geom IS NULL -- pas de restriction géographique à la validité de la règle + OR + st_intersects(my_geom, a.geom) + ) AND ( -- période de validité + to_char(my_date_obs, 'MMDD') between to_char(r.date_min, 'MMDD') and to_char(r.date_max, 'MMDD') + ) AND ( -- durée de validité + (date_part('year', CURRENT_TIMESTAMP) - r.sensitivity_duration) <= date_part('year', my_date_obs) + ) AND ( -- critère + c.id_criteria IS NULL -- règle sans restriction de critère + OR + -- Note: no need to check criteria type, as we use id_nomenclature which can not conflict + c.id_criteria IN (SELECT value::int FROM jsonb_each_text(my_criterias)) + ) + ORDER BY n.cd_nomenclature DESC; + + IF sensitivity IS NULL THEN + sensitivity := (SELECT ref_nomenclatures.get_id_nomenclature('SENSIBILITE'::text, '0'::text)); + END IF; + + return sensitivity; + END; + $function$ + """) + + if recompute_sensitivity: + logger.info("Recompute sensitivity…") + count = op.get_bind().execute("SELECT gn_synthese.update_sensitivity()").scalar() + logger.info(f"Sensitivity updated for {count} rows") + + +def downgrade(): + op.execute(""" + CREATE OR REPLACE FUNCTION gn_sensitivity.get_id_nomenclature_sensitivity(my_date_obs date, my_cd_ref integer, my_geom geometry, my_criterias jsonb) + RETURNS integer + LANGUAGE plpgsql + IMMUTABLE + AS $function$ + DECLARE + niv_precis integer; + niv_precis_null integer; + BEGIN + + niv_precis_null := (SELECT ref_nomenclatures.get_id_nomenclature('SENSIBILITE'::text, '0'::text)); + + -- ########################################## + -- TESTS unicritère + -- => Permet de voir si un critère est remplis ou non de façon à limiter au maximum + -- la requete globale qui croise l'ensemble des critères + -- ########################################## + + -- Paramètres cd_ref + IF NOT EXISTS ( + SELECT 1 + FROM gn_sensitivity.t_sensitivity_rules_cd_ref s + WHERE s.cd_ref = my_cd_ref + ) THEN + return niv_precis_null; + END IF; + + -- Paramètres durée de validité de la règle + IF NOT EXISTS ( + SELECT 1 + FROM gn_sensitivity.t_sensitivity_rules_cd_ref s + WHERE s.cd_ref = my_cd_ref + AND (date_part('year', CURRENT_TIMESTAMP) - sensitivity_duration) <= date_part('year', my_date_obs) + ) THEN + return niv_precis_null; + END IF; + + -- Paramètres période d'observation + IF NOT EXISTS ( + SELECT 1 + FROM 
gn_sensitivity.t_sensitivity_rules_cd_ref s + WHERE s.cd_ref = my_cd_ref + AND (to_char(my_date_obs, 'MMDD') between to_char(s.date_min, 'MMDD') and to_char(s.date_max, 'MMDD') ) + ) THEN + return niv_precis_null; + END IF; + + -- Paramètres critères biologiques + -- S'il existe un critère pour ce taxon + IF EXISTS ( + SELECT 1 + FROM gn_sensitivity.t_sensitivity_rules_cd_ref s + JOIN gn_sensitivity.cor_sensitivity_criteria c USING(id_sensitivity) + WHERE s.cd_ref = my_cd_ref + ) THEN + -- Si le critère est remplis + niv_precis := ( + + WITH RECURSIVE h_val(KEY, value, id_broader) AS ( + SELECT KEY, value::int, id_broader + FROM (SELECT * FROM jsonb_each_text(my_criterias)) d + JOIN ref_nomenclatures.t_nomenclatures tn + ON tn.id_nomenclature = d.value::int + UNION + SELECT KEY, id_nomenclature , tn.id_broader + FROM ref_nomenclatures.t_nomenclatures tn + JOIN h_val + ON tn.id_nomenclature = h_val.id_broader + WHERE NOT id_nomenclature = 0 + ) + SELECT DISTINCT id_nomenclature_sensitivity + FROM gn_sensitivity.t_sensitivity_rules_cd_ref s + JOIN gn_sensitivity.cor_sensitivity_criteria c USING(id_sensitivity) + JOIN h_val a + ON c.id_criteria = a.value + WHERE s.cd_ref = my_cd_ref + LIMIT 1 + ); + IF niv_precis IS NULL THEN + niv_precis := (SELECT ref_nomenclatures.get_id_nomenclature('SENSIBILITE'::text, '0'::text)); + return niv_precis; + END IF; + END IF; + + + + -- ########################################## + -- TESTS multicritères + -- => Permet de voir si l'ensemble des critères sont remplis + -- ########################################## + + -- Paramètres durée, zone géographique, période de l'observation et critères biologique + SELECT INTO niv_precis s.id_nomenclature_sensitivity + FROM ( + SELECT s.*, l.geom, c.id_criteria, c.id_type_nomenclature + FROM gn_sensitivity.t_sensitivity_rules_cd_ref s + LEFT OUTER JOIN gn_sensitivity.cor_sensitivity_area USING(id_sensitivity) + LEFT OUTER JOIN gn_sensitivity.cor_sensitivity_criteria c USING(id_sensitivity) + LEFT OUTER JOIN ref_geo.l_areas l USING(id_area) + ) s + WHERE my_cd_ref = s.cd_ref + AND (st_intersects(my_geom, s.geom) OR s.geom IS NULL) -- paramètre géographique + AND (-- paramètre période + (to_char(my_date_obs, 'MMDD') between to_char(s.date_min, 'MMDD') and to_char(s.date_max, 'MMDD') ) + ) + AND ( -- paramètre duré de validité de la règle + (date_part('year', CURRENT_TIMESTAMP) - sensitivity_duration) <= date_part('year', my_date_obs) + ) + AND ( -- paramètre critères + s.id_criteria IN (SELECT value::int FROM jsonb_each_text(my_criterias)) OR s.id_criteria IS NULL + ); + + IF niv_precis IS NULL THEN + niv_precis := niv_precis_null; + END IF; + + + return niv_precis; + + END; + $function$ + """) diff --git a/backend/geonature/tests/test_sensitivity.py b/backend/geonature/tests/test_sensitivity.py index 00ccf7bfd7..8a4322ff20 100644 --- a/backend/geonature/tests/test_sensitivity.py +++ b/backend/geonature/tests/test_sensitivity.py @@ -36,6 +36,11 @@ def test_get_id_nomenclature_sensitivity(self, app): mnemonique='Hibernation').one() statut_bio_reproduction = TNomenclatures.query.filter_by(id_type=statut_bio_type.id_type, mnemonique='Reproduction').one() + life_stage_type = BibNomenclaturesTypes.query.filter_by(mnemonique='STADE_VIE').one() + # We choose a life stage with the same cd_nomenclature than tested status bio + life_stage_conflict = TNomenclatures.query.filter_by(id_type=life_stage_type.id_type, + cd_nomenclature=statut_bio_hibernation.cd_nomenclature).one() + query = sa.select([TNomenclatures.mnemonique]) \ 
.where(TNomenclatures.id_nomenclature==func.gn_sensitivity.get_id_nomenclature_sensitivity( sa.cast(date_obs, sa.types.Date), @@ -54,140 +59,202 @@ def test_get_id_nomenclature_sensitivity(self, app): no_diffusion = TNomenclatures.query.filter_by(id_type=sensitivity_nomenc_type.id_type, mnemonique='4').one() + st_intersects = func.ST_Intersects(LAreas.geom, func.ST_Transform(geom, config['LOCAL_SRID'])) + deps = LAreas.query.join(BibAreasTypes).filter(BibAreasTypes.type_code=='DEP') + area_in = deps.filter(st_intersects).first() + area_out = deps.filter(sa.not_(st_intersects)).first() + with db.session.begin_nested(): rule = SensitivityRule(cd_nom=taxon.cd_nom, - nomenclature_sensitivity=no_diffusion, + nomenclature_sensitivity=diffusion_maille, sensitivity_duration=100) db.session.add(rule) with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') - assert(db.session.execute(query).scalar() == no_diffusion.mnemonique) + # Check the rule apply correctly + assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + + # Reduce rule duration and check rule does not apply anymore + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.sensitivity_duration = 1 with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() # restore rule duration + + # Change sensitivity to no diffusion + transaction = db.session.begin_nested() with db.session.begin_nested(): - rule.sensitivity_duration = 10 - rule.nomenclature_sensitivity = diffusion_maille + rule.nomenclature_sensitivity = no_diffusion with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') - assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + assert(db.session.execute(query).scalar() == no_diffusion.mnemonique) + transaction.rollback() # restore rule sensitivity + # Set rule validity period excluding observation date + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.date_min = date(1900, 4, 1) rule.date_max = date(1900, 6, 30) with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() + # Set rule validity period including observation date + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.date_min = date(1900, 2, 1) rule.date_max = date(1900, 4, 30) with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + # Disable the rule + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.active = False with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() - with db.session.begin_nested(): - rule.active = True - with db.session.begin_nested(): - db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') - + # Add a not matching bio status + transaction = db.session.begin_nested() with db.session.begin_nested(): 
rule.criterias.append(statut_bio_reproduction) assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() + # Add a matching bio status + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.criterias.append(statut_bio_hibernation) assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + # Add a matching and a not matching bio status + # The rule should match as soon as at least one bio status matches + transaction = db.session.begin_nested() with db.session.begin_nested(): - rule.criterias.remove(statut_bio_reproduction) + rule.criterias.append(statut_bio_reproduction) + rule.criterias.append(statut_bio_hibernation) assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + # We add a not matching life stage, but with the same cd_nomenclature as + # the bio status of the observation, and check that the rule does not apply even so. + transaction = db.session.begin_nested() with db.session.begin_nested(): - rule.criterias.remove(statut_bio_hibernation) - assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) - - f = func.ST_Intersects(LAreas.geom, func.ST_Transform(geom, config['LOCAL_SRID'])) - deps = LAreas.query.join(BibAreasTypes).filter(BibAreasTypes.type_code=='DEP') - area_in = deps.filter(f).first() - area_out = deps.filter(sa.not_(f)).first() + rule.criterias.append(life_stage_conflict) + assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() + # Add a matching area to the rule → the rule still applies + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.areas.append(area_in) - # l’observation est dans le périmètre d’application, la règle de sensibilité s’applique assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + # Add a not matching area to the rule → the rule does not apply anymore + transaction = db.session.begin_nested() with db.session.begin_nested(): rule.areas.append(area_out) - # l’observation est dans une des zones du périmètre d’application, la règle de sensibilité s’applique - assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) - - with db.session.begin_nested(): - rule.areas.remove(area_in) - # l’observation n’est pas dans le périmètre d’application de la règle de sensibilité assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() + # Add a matching and a not matching area to the rule + # The rule should apply as soon as at least one area matches + transaction = db.session.begin_nested() with db.session.begin_nested(): - rule.areas.remove(area_out) - # the rule has no areas anymore, it applies + rule.areas.append(area_in) + rule.areas.append(area_out) assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + # Add a matching area but a not matching bio status + transaction = db.session.begin_nested() + with db.session.begin_nested(): + rule.areas.append(area_in) + rule.criterias.append(statut_bio_reproduction) + assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() + + # Add a second more restrictive rule with db.session.begin_nested(): rule2 = SensitivityRule(cd_nom=taxon.cd_nom, nomenclature_sensitivity=no_diffusion, sensitivity_duration=100) db.session.add(rule2) - # we have two rule, the more restrictive one apply - 
#assert(db.session.execute(query).scalar() == no_diffusion.mnemonique) # FIXME this test fail! + with db.session.begin_nested(): + db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') + rule1 = rule + + # Verify that the more restrictive rule match + assert(db.session.execute(query).scalar() == no_diffusion.mnemonique) + + # Add not matching bio status criteria on rule 2, but rule 1 should still apply + transaction = db.session.begin_nested() + with db.session.begin_nested(): + rule2.criterias.append(statut_bio_reproduction) # not matching + assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + + # Add not matching area on rule 2, but rule 1 should apply + transaction = db.session.begin_nested() + with db.session.begin_nested(): + rule2.areas.append(area_out) + assert(db.session.execute(query).scalar() == diffusion_maille.mnemonique) + transaction.rollback() + + # Add not matching area on rule 1, but rule 2 should apply + transaction = db.session.begin_nested() + with db.session.begin_nested(): + rule1.areas.append(area_out) + assert(db.session.execute(query).scalar() == no_diffusion.mnemonique) + transaction.rollback() + + # Add not matching area on rule 1, and not matching bio status on rule 2 + transaction = db.session.begin_nested() + with db.session.begin_nested(): + rule1.areas.append(area_out) + rule2.criterias.append(statut_bio_reproduction) # not matching + assert(db.session.execute(query).scalar() == not_sensitive.mnemonique) + transaction.rollback() def test_synthese_sensitivity(self, app): taxon = Taxref.query.first() sensitivity_nomenc_type = BibNomenclaturesTypes.query.filter_by(mnemonique='SENSIBILITE').one() - nomenc_maille = TNomenclatures.query.filter_by(id_type=sensitivity_nomenc_type.id_type, - mnemonique='2').one() + nomenc_not_sensitive = TNomenclatures.query.filter_by(id_type=sensitivity_nomenc_type.id_type, + mnemonique='0').one() nomenc_no_diff = TNomenclatures.query.filter_by(id_type=sensitivity_nomenc_type.id_type, mnemonique='4').one() with db.session.begin_nested(): - rule = SensitivityRule(cd_nom=taxon.cd_nom, nomenclature_sensitivity=nomenc_no_diff, - sensitivity_duration=100) + rule = SensitivityRule( + cd_nom=taxon.cd_nom, + nomenclature_sensitivity=nomenc_no_diff, + sensitivity_duration=5, + ) db.session.add(rule) with db.session.begin_nested(): db.session.execute('REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref') - date_obs = datetime.now() - geom = WKTElement('POINT(6.12 44.85)', srid=4326) - - query = func.gn_sensitivity.get_id_nomenclature_sensitivity( - sa.cast(date_obs, sa.types.Date), - taxon.cd_ref, - geom, - sa.cast({}, sa.dialects.postgresql.JSONB), - ) - id_nomenc = db.session.execute(query).scalar() - nomenc = TNomenclatures.query.get(id_nomenc) - assert(nomenc.mnemonique == nomenc_no_diff.mnemonique) + date_obs = datetime.now() with db.session.begin_nested(): s = Synthese(cd_nom=taxon.cd_nom, nom_cite='Sensitive taxon', - date_min=date_obs, date_max=date_obs)#, the_geom_4326=geom) + date_min=date_obs, date_max=date_obs) db.session.add(s) db.session.refresh(s) assert(s.id_nomenclature_sensitivity == nomenc_no_diff.id_nomenclature) - # verify setting id_nomenclature_sensitivity manually have precedence other sensitivity trigger + date_obs -= timedelta(days=365 * 10) with db.session.begin_nested(): - s = Synthese(cd_nom=taxon.cd_nom, nom_cite='Sensitive taxon', - date_min=date_obs, date_max=date_obs, - 
id_nomenclature_sensitivity=nomenc_maille.id_nomenclature) - db.session.add(s) + s.date_min = date_obs + s.date_max = date_obs db.session.refresh(s) - assert(s.id_nomenclature_sensitivity == nomenc_maille.id_nomenclature) + assert(s.id_nomenclature_sensitivity == nomenc_not_sensitive.id_nomenclature) diff --git a/docs/CHANGELOG.rst b/docs/CHANGELOG.rst index a85d61ec58..5d51d600bf 100644 --- a/docs/CHANGELOG.rst +++ b/docs/CHANGELOG.rst @@ -2,6 +2,44 @@ CHANGELOG ========= +2.9.2 (2022-02-15) +------------------ + +**🚀 Nouveautés** + +* Optimisation du nombre d’informations renvoyées par l’API pour les utilisateurs et les organismes +* Ajout d’une commande pour relancer le calcul de la sensibilité, utile en cas de modification du référentiel de sensibilité : ``geonature sensitivity update-synthese``. Elle s'appuie sur la fonction ``gn_synthese.update_sensitivity()``. +* Le niveau de diffusion dans la synthèse n’est plus calculé automatiquement à partir du niveau de sensibilité (#1711) +* Le niveau de sensibilité tient compte du comportement de l’occurrence (``OCC_COMPORTEMENT``), en plus du statut biologique (``STATUT_BIO``) +* Optimisation du recalcul de la sensibilité lors de la mise à jour de la synthèse (trigger ``BEFORE`` au lieu de ``AFTER``) +* Ajout de tests unitaires sur les fonctions de calcul de la sensibilité + +**🐛 Corrections** + +* Correction d’une régression sur la récupération de la liste des taxons (#1674) +* Correction de l’authentification au CAS de l’INPN +* Correction du calcul de la sensibilité (#1284) : + + * Gestion correcte de la présence de plusieurs règles avec et sans critère statut biologique + * Utilisation de la règle la plus sensible quand plusieurs règles s’appliquent + +**⚠️ Notes de version** + +* La correction de la fonction de calcul de la sensibilité est suivie d’un recalcul automatique du niveau de sensibilité des données présentes dans la synthèse. Si vous ne souhaitez pas procéder à ce recalcul, ajoutez le paramètre ``-x recompute-sensitivity=false`` lors de la mise à jour de la base de données avec la commande ``geonature db autoupgrade`` (lancée automatiquement par le script ``migration.sh``) : + + :: + + (venv)$ geonature db autoupgrade -x recompute-sensitivity=false + +* Le niveau de diffusion des données dans la synthèse est remis à ``NULL`` si celui-ci équivaut au niveau de sensibilité. Seuls les niveaux de diffusion qui différent sont laissés intacts. Si vous souhaitez rectifier vous-mêmes vos niveaux de diffusion et ne pas les remettre à ``NULL`` quand ils sont équivalents au niveau de sensibilité, vous pouvez ajouter le paramètre ``-x clear-diffusion-level=false`` lors de la mise à jour de la base de données : + + :: + + (venv)$ geonature db autoupgrade -x clear-diffusion-level=false + + Si vous redescendez à l’état antérieur de votre base de données, les niveaux de diffusion seront restaurés à partir du niveau de sensibilité ; vous pouvez éviter ceci avec ``-x restore-diffusion-level=false``. 
+ + 2.9.1 (2022-01-27) ------------------ diff --git a/docs/versions-compatibility.rst b/docs/versions-compatibility.rst index 4fbdd72122..9e7da65926 100644 --- a/docs/versions-compatibility.rst +++ b/docs/versions-compatibility.rst @@ -3,6 +3,27 @@ COMPATIBILITE Versions fournies et testées des dépendances +GeoNature 2.9.2 +--------------- + ++----------------------------+---------+ +| Application / Module | Version | ++============================+=========+ +| TaxHub | 1.9.4 | ++----------------------------+---------+ +| UsersHub | 2.2.2 | ++----------------------------+---------+ +| Nomenclature-Api | 1.5.1 | ++----------------------------+---------+ +| Authentification-Api | 1.5.9 | ++----------------------------+---------+ +| Habref-Api | 0.3.0 | ++----------------------------+---------+ +| Utils-Flask-SQLAlchemy | 0.2.6 | ++----------------------------+---------+ +| Utils-Flask-SQLAlchemy-Geo | 0.2.2 | ++----------------------------+---------+ + GeoNature 2.9.1 --------------- diff --git a/frontend/src/app/GN2CommonModule/form/data-form.service.ts b/frontend/src/app/GN2CommonModule/form/data-form.service.ts index f2ab9c6f95..d80d0641e4 100644 --- a/frontend/src/app/GN2CommonModule/form/data-form.service.ts +++ b/frontend/src/app/GN2CommonModule/form/data-form.service.ts @@ -161,7 +161,6 @@ export class DataFormService { } getTaxaBibList() { - return Observable.of([]) return this._http.get(`${AppConfig.API_TAXHUB}/biblistes/`).map(d => d.data); } diff --git a/install/install_all/install_all.ini b/install/install_all/install_all.ini index a35625ca42..ad9e9b6ca9 100644 --- a/install/install_all/install_all.ini +++ b/install/install_all/install_all.ini @@ -32,7 +32,7 @@ usershub_release=2.2.2 ### CONFIGURATION GEONATURE ### # Version de GeoNature -geonature_release=2.9.1 +geonature_release=2.9.2 # Effacer la base de données GeoNature existante lors de la réinstallation drop_geonaturedb=false # Nom de la base de données GeoNature
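
For reference, the recomputation path introduced by this changeset can also be driven by hand from a Python session. The sketch below is indicative only; it assumes an active application context (e.g. a ``geonature shell`` session) and uses placeholder values: the ``cd_nom``, observation date and geometry are arbitrary, ``2154`` is assumed to be the local SRID, and the empty criteria object stands in for real ``STATUT_BIO`` / ``OCC_COMPORTEMENT`` id_nomenclature values. It previews the sensitivity computed for a single observation via ``gn_sensitivity.get_id_nomenclature_sensitivity()``, then triggers the same global update as the new ``geonature sensitivity update-synthese`` command::

    from datetime import date

    import sqlalchemy as sa
    from sqlalchemy import func
    from sqlalchemy.dialects.postgresql import JSONB

    from geonature.utils.env import db

    # Sensitivity that the rules would assign to one observation; real calls
    # pass STATUT_BIO / OCC_COMPORTEMENT id_nomenclature values in the JSONB.
    id_sensitivity = db.session.execute(
        sa.select([
            func.gn_sensitivity.get_id_nomenclature_sensitivity(
                sa.cast(date(2022, 2, 15), sa.types.Date),  # placeholder observation date
                func.taxonomie.find_cdref(60612),  # placeholder cd_nom
                func.ST_Transform(func.ST_GeomFromText('POINT(6.12 44.85)', 4326), 2154),
                sa.cast({}, JSONB),
            )
        ])
    ).scalar()

    # Recompute the whole synthese; this is what the new
    # `geonature sensitivity update-synthese` command runs.
    count = db.session.execute("SELECT gn_synthese.update_sensitivity()").scalar()
    db.session.commit()
    print(f"Sensitivity updated for {count} rows")

The last three statements mirror the body of the new CLI command, so running either one after changing the sensitivity referential should produce the same result.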