diff --git a/backend/geonature/core/gn_meta/models/commons.py b/backend/geonature/core/gn_meta/models/commons.py index 5b02900c73..ff5e4a30dd 100644 --- a/backend/geonature/core/gn_meta/models/commons.py +++ b/backend/geonature/core/gn_meta/models/commons.py @@ -183,6 +183,24 @@ class CorDatasetProtocol(DB.Model): id_protocol = DB.Column(DB.Integer, ForeignKey("gn_meta.sinp_datatype_protocols.id_protocol")) +cor_dataset_objectif = db.Table( + "cor_dataset_objectif", + db.Column( + "id_dataset", + db.Integer, + ForeignKey("gn_meta.t_datasets.id_dataset"), + primary_key=True, + ), + db.Column( + "id_nomenclature_objectif", + db.Integer, + ForeignKey(TNomenclatures.id_nomenclature), + primary_key=True, + ), + schema="gn_meta", +) + + cor_dataset_territory = db.Table( "cor_dataset_territory", db.Column( diff --git a/backend/geonature/core/gn_meta/models/datasets.py b/backend/geonature/core/gn_meta/models/datasets.py index d6bf0605f2..41aeb0197d 100644 --- a/backend/geonature/core/gn_meta/models/datasets.py +++ b/backend/geonature/core/gn_meta/models/datasets.py @@ -50,11 +50,6 @@ class TDatasets(db.Model): keywords = DB.Column(DB.Unicode) marine_domain = DB.Column(DB.Boolean) terrestrial_domain = DB.Column(DB.Boolean) - id_nomenclature_dataset_objectif = DB.Column( - DB.Integer, - ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), - default=lambda: TNomenclatures.get_default_nomenclature("JDD_OBJECTIFS"), - ) bbox_west = DB.Column(DB.Float) bbox_east = DB.Column(DB.Float) bbox_south = DB.Column(DB.Float) @@ -102,10 +97,6 @@ class TDatasets(db.Model): TNomenclatures, foreign_keys=[id_nomenclature_data_type], ) - nomenclature_dataset_objectif = DB.relationship( - TNomenclatures, - foreign_keys=[id_nomenclature_dataset_objectif], - ) nomenclature_collecting_method = DB.relationship( TNomenclatures, foreign_keys=[id_nomenclature_collecting_method], @@ -123,6 +114,13 @@ class TDatasets(db.Model): foreign_keys=[id_nomenclature_resource_type], ) + cor_objectifs 
= DB.relationship( + TNomenclatures, + secondary=cor_dataset_objectif, + # TODO: check what is the use of the following / no reference to "objectif_dataset" needed elsewhere + backref=DB.backref("objectif_dataset"), + ) + cor_territories = DB.relationship( TNomenclatures, secondary=cor_dataset_territory, diff --git a/backend/geonature/core/gn_meta/routes.py b/backend/geonature/core/gn_meta/routes.py index 97bdc05c79..a635d4d85f 100644 --- a/backend/geonature/core/gn_meta/routes.py +++ b/backend/geonature/core/gn_meta/routes.py @@ -166,11 +166,11 @@ def get_dataset(scope, id_dataset): "cor_dataset_actor.role", "modules", "nomenclature_data_type", - "nomenclature_dataset_objectif", "nomenclature_collecting_method", "nomenclature_data_origin", "nomenclature_source_status", "nomenclature_resource_type", + "cor_objectifs", "cor_territories", "acquisition_framework", "acquisition_framework.creator", @@ -384,7 +384,8 @@ def my_csv_resp(filename, data, columns, _header, separator=";"): def datasetHandler(dataset, data): datasetSchema = DatasetSchema( - only=["cor_dataset_actor", "modules", "cor_territories"], unknown=EXCLUDE + only=["cor_dataset_actor", "modules", "cor_objectifs", "cor_territories"], + unknown=EXCLUDE, ) try: dataset = datasetSchema.load(data, instance=dataset) @@ -450,12 +451,12 @@ def get_export_pdf_dataset(id_dataset, scope): dataset_schema = DatasetSchema( only=[ "nomenclature_data_type", - "nomenclature_dataset_objectif", "nomenclature_collecting_method", "acquisition_framework", "cor_dataset_actor.nomenclature_actor_role", "cor_dataset_actor.organism", "cor_dataset_actor.role", + "cor_objectifs", ] ) dataset = dataset_schema.dump(dataset) diff --git a/backend/geonature/core/gn_meta/schemas.py b/backend/geonature/core/gn_meta/schemas.py index 81cc1f4ecc..043ed05475 100644 --- a/backend/geonature/core/gn_meta/schemas.py +++ b/backend/geonature/core/gn_meta/schemas.py @@ -59,11 +59,11 @@ class Meta: creator = MA.Nested(UserSchema, dump_only=True) 
nomenclature_data_type = MA.Nested(NomenclatureSchema, dump_only=True) - nomenclature_dataset_objectif = MA.Nested(NomenclatureSchema, dump_only=True) nomenclature_collecting_method = MA.Nested(NomenclatureSchema, dump_only=True) nomenclature_data_origin = MA.Nested(NomenclatureSchema, dump_only=True) nomenclature_source_status = MA.Nested(NomenclatureSchema, dump_only=True) nomenclature_resource_type = MA.Nested(NomenclatureSchema, dump_only=True) + cor_objectifs = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE) cor_territories = MA.Nested(NomenclatureSchema, many=True, unknown=EXCLUDE) acquisition_framework = MA.Nested("AcquisitionFrameworkSchema", dump_only=True) sources = MA.Nested(SourceSchema, many=True, dump_only=True) diff --git a/backend/geonature/core/gn_synthese/blueprints/synthese.py b/backend/geonature/core/gn_synthese/blueprints/synthese.py index ef968aa5e8..6e977b61fa 100644 --- a/backend/geonature/core/gn_synthese/blueprints/synthese.py +++ b/backend/geonature/core/gn_synthese/blueprints/synthese.py @@ -278,10 +278,13 @@ def get_one_synthese(permissions, id_synthese): "dataset.cor_dataset_actor", "dataset.cor_dataset_actor.role", "dataset.cor_dataset_actor.organism", + # TODO: verify if it is necessary to have "dataset.cor_objectifs" + # Note: necessary to keep several PyTest tests successful as is + # but does not seem to be used anywhere in the frontend ...
+ # "dataset.cor_objectifs", "dataset.cor_territories", "dataset.nomenclature_source_status", "dataset.nomenclature_resource_type", - "dataset.nomenclature_dataset_objectif", "dataset.nomenclature_data_type", "dataset.nomenclature_data_origin", "dataset.nomenclature_collecting_method", diff --git a/backend/geonature/migrations/versions/ae0b6362fb22_add_table_gn_meta_cor_dataset_obje.py b/backend/geonature/migrations/versions/ae0b6362fb22_add_table_gn_meta_cor_dataset_obje.py new file mode 100644 index 0000000000..960c3144ea --- /dev/null +++ b/backend/geonature/migrations/versions/ae0b6362fb22_add_table_gn_meta_cor_dataset_obje.py @@ -0,0 +1,181 @@ +"""add table gn_meta.cor_dataset_objectif + +Revision ID: ae0b6362fb22 +Revises: cb663f039774 +Create Date: 2026-04-27 12:58:35.383337 + +""" + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = "ae0b6362fb22" +down_revision = "cb663f039774" +branch_labels = None +depends_on = None + + +def upgrade(): + # TODO: choose whether to add missing comments + # // Check what is made for `cor_acquisition_framework_objectif`, for which some comments are missing + + # Create "new" table + # TODO: determine if, and which, a comment must be added + # // for `cor_acquisition_framework_objectif`: + # """ + # A dataset can have 0 or N "objectif(s)". Implement X.Y.Z SINP metadata standard : Objectif du jeu de données, tel que défini par la nomenclature [...] - [OBLIGATOIRE?] + # """ + # TODO: choose whether to explicitly name the PK "pk_cor_dataset_objectif" + # vs.
"cor_dataset_objectif_pkey" implied here because not specified + # // we have "pk_cor_acquisition_framework_objectif" for `cor_acquisition_framework_objectif` + op.create_table( + "cor_dataset_objectif", + sa.Column("id_dataset", sa.Integer, primary_key=True), + sa.Column("id_nomenclature_objectif", sa.Integer, primary_key=True), + schema="gn_meta", + ) + # TODO: verify if onupdate and ondelete actions are actually those wished + # [CASCADE for both for id_dataset, CASCADE for onupdate and NO ACTION for ondelete for objectif] is same as what is defined for cor_acquisition_framework_objectif + op.create_foreign_key( + "fk_cor_dataset_objectif_id_dataset", + source_schema="gn_meta", + source_table="cor_dataset_objectif", + local_cols=["id_dataset"], + referent_schema="gn_meta", + referent_table="t_datasets", + remote_cols=["id_dataset"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.create_foreign_key( + "fk_cor_dataset_objectif_id_nomenclature_objectif", + source_schema="gn_meta", + source_table="cor_dataset_objectif", + local_cols=["id_nomenclature_objectif"], + referent_schema="ref_nomenclatures", + referent_table="t_nomenclatures", + remote_cols=["id_nomenclature"], + onupdate="CASCADE", + ondelete="NO ACTION", + ) + + # Add constraint to ensure nomenclature type is "JDD_OBJECTIFS" + op.execute( + """ + ALTER TABLE gn_meta.cor_dataset_objectif + ADD CONSTRAINT check_cor_dataset_objectif + CHECK (ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_objectif, 'JDD_OBJECTIFS')) NOT VALID; + """ + ) + + # Insert data from "old" field - i.e. 
`gn_meta.t_datasets.id_nomenclature_dataset_objectif` - to "new" table + op.execute( + """ + INSERT INTO gn_meta.cor_dataset_objectif (id_dataset, id_nomenclature_objectif) + SELECT id_dataset, id_nomenclature_dataset_objectif FROM gn_meta.t_datasets; + """ + ) + + # TODO: choose whether to rather keep the "old" field as an archive + # Remove "old" field + op.drop_column("t_datasets", "id_nomenclature_dataset_objectif", schema="gn_meta") + + # TODO: choose whether to remove following, when no more needed, depending on : + # - Whether we eventually create the archive table in the downgrade ; + # - Whether just here for dev purpose. + # If gn_meta.cor_dataset_objectif_archive exists, copy it to gn_meta.cor_dataset_objectif + from sqlalchemy.engine.reflection import Inspector + + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + tables = inspector.get_table_names(schema="gn_meta") + if "cor_dataset_objectif_archive" in tables: + op.execute( + """ + INSERT INTO gn_meta.cor_dataset_objectif (id_dataset, id_nomenclature_objectif) + SELECT id_dataset, id_nomenclature_objectif + FROM gn_meta.cor_dataset_objectif_archive + WHERE NOT EXISTS ( + SELECT 1 + FROM gn_meta.cor_dataset_objectif + WHERE id_dataset = gn_meta.cor_dataset_objectif_archive.id_dataset + AND id_nomenclature_objectif = gn_meta.cor_dataset_objectif_archive.id_nomenclature_objectif + ); + """ + ) + + # TODO: remove following if we eventually do not create the archive table in the downgrade + # Remove the archive table possibly created through downgrade + # `if_exists=True` to avoid error if table actually does not exist, notably through first upgrade + op.drop_table("cor_dataset_objectif_archive", schema="gn_meta", if_exists=True) + + +def downgrade(): + # TODO: verify if following fully sticks to origin spec for this column - alternatives to verify: + # A. 
Inspect spec in "backend/geonature/migrations/data/core/meta.sql" // first Alembic rev of branch "geonature" + # > Done > added missing elements below: `server_default`, `COMMENT`, `fk_t_datasets_objectif` and `check_t_datasets_objectif` + # TODO: test upgrading and downgrading with the additions + # TODO: verify if it is sufficient to only check this file, or if there are other subsequent revs affecting the column spec + # B. Check from a database instantiated from zero but without having a first upgrade of the current rev done + # TODO: choose whether to keep this add_column, depending on whether the column is dropped in the upgrade + # Create "old" field back + op.add_column( + "t_datasets", + sa.Column( + "id_nomenclature_dataset_objectif", + sa.Integer, + nullable=False, + server_default="ref_nomenclatures.get_default_nomenclature_value('JDD_OBJECTIFS')", + ), + schema="gn_meta", + ) + op.execute( + """ + COMMENT ON COLUMN gn_meta.t_datasets.id_nomenclature_dataset_objectif IS + 'Correspondance standard SINP = objectifJdd : Objectif du jeu de données tel que défini par la nomenclature ObjectifJeuDonneesValue - OBLIGATOIRE'; + """ + ) + op.execute( + """ + ALTER TABLE ONLY t_datasets + ADD CONSTRAINT fk_t_datasets_objectif FOREIGN KEY (id_nomenclature_dataset_objectif) + REFERENCES ref_nomenclatures.t_nomenclatures(id_nomenclature) ON UPDATE CASCADE; + """ + ) + op.execute( + """ + ALTER TABLE t_datasets + ADD CONSTRAINT check_t_datasets_objectif + CHECK (ref_nomenclatures.check_nomenclature_type_by_mnemonique(id_nomenclature_dataset_objectif,'JDD_OBJECTIFS')) NOT VALID; + """ + ) + + # TODO: choose whether to keep following: + # Make an archive of the "new" table + op.execute( + """ + CREATE TABLE gn_meta.cor_dataset_objectif_archive AS TABLE gn_meta.cor_dataset_objectif; + """ + ) + + # TODO: choose whether to rather - two alternatives: + # - Prompt the user before end of the downgrade to check: repopulate the "old" field from the "new" table, + # the user would then have to
choose which objectif to keep whenever there are strictly more than 1 objectif associated to a dataset + # - Warn the user after the end of the downgrade to check: a posteriori from the archive of the "new" table + # Repopulate by arbitrarily picking the first objectif + # TODO: decide whether already specified values should be overwritten or not + op.execute( + """ + UPDATE gn_meta.t_datasets + SET id_nomenclature_dataset_objectif = ( + SELECT id_nomenclature_objectif + FROM gn_meta.cor_dataset_objectif + WHERE id_dataset = gn_meta.t_datasets.id_dataset + LIMIT 1 + ); + """ + ) + + # Remove the "new" table + op.drop_table("cor_dataset_objectif", schema="gn_meta") diff --git a/backend/geonature/templates/acquisition_framework_template_pdf.html b/backend/geonature/templates/acquisition_framework_template_pdf.html index 809d995d6d..3039000ec8 100644 --- a/backend/geonature/templates/acquisition_framework_template_pdf.html +++ b/backend/geonature/templates/acquisition_framework_template_pdf.html @@ -200,8 +200,10 @@

Objectifs

+ {% for obj in data.cor_objectifs -%} {% if loop.nextitem: %} + {{ obj['mnemonique'] }}, {% else %} {{ obj['mnemonique'] }} diff --git a/backend/geonature/templates/dataset_template_pdf.html b/backend/geonature/templates/dataset_template_pdf.html index 03bef2e0d4..f1e301d9de 100644 --- a/backend/geonature/templates/dataset_template_pdf.html +++ b/backend/geonature/templates/dataset_template_pdf.html @@ -111,12 +111,17 @@

{{data.title}}

+
-

Objectifs

+

Objectif(s)

- {% if data.nomenclature_dataset_objectif.label_default: %} - {{ data.nomenclature_dataset_objectif.label_default }} - {% endif %} + {% for obj in data.cor_objectifs -%} + {% if loop.nextitem: %} + {{ obj['label_default'] }},
+ {% else %} + {{ obj['label_default'] }} + {% endif %} + {%- endfor %}

diff --git a/frontend/src/app/metadataModule/af/af-form.component.html b/frontend/src/app/metadataModule/af/af-form.component.html index 04052834f9..f0680dfc1f 100644 --- a/frontend/src/app/metadataModule/af/af-form.component.html +++ b/frontend/src/app/metadataModule/af/af-form.component.html @@ -78,6 +78,10 @@

Cadre d'acquisition

+ Cadre d'acquisition > + Cadre d'acquisition data-qa="pnx-metadata-af-form-objectif" > + {{ 'MetaData.DatasetCard.DescriptiveForm' | translate }}
-
{{ 'Aims' | translate }}
-
- {{ dataset?.nomenclature_dataset_objectif.label_default || 'Non renseigné' }} +
{{ 'Objective(s)' | translate }}
+
+ {{ objectif?.label_default }}

diff --git a/frontend/src/app/metadataModule/datasets/dataset-form.component.html b/frontend/src/app/metadataModule/datasets/dataset-form.component.html index 7d1b38c755..79770a645a 100644 --- a/frontend/src/app/metadataModule/datasets/dataset-form.component.html +++ b/frontend/src/app/metadataModule/datasets/dataset-form.component.html @@ -158,9 +158,11 @@

{{ 'Dataset' | translate }}

diff --git a/frontend/src/app/metadataModule/services/af-form.service.ts b/frontend/src/app/metadataModule/services/af-form.service.ts index ad6dd12825..387dfde6ef 100644 --- a/frontend/src/app/metadataModule/services/af-form.service.ts +++ b/frontend/src/app/metadataModule/services/af-form.service.ts @@ -42,6 +42,9 @@ export class AcquisitionFrameworkFormService { //formate les donnés par défauts envoyées au formulaire return { is_parent: false, + /* TODO: verify if following is necessary or even useful: + Note: from a few manual tests with ds form page, it seems useless + */ cor_objectifs: [], cor_volets_sinp: [], cor_territories: [], diff --git a/frontend/src/app/metadataModule/services/dataset-form.service.ts b/frontend/src/app/metadataModule/services/dataset-form.service.ts index 40e5ca68f1..a63ff67d68 100644 --- a/frontend/src/app/metadataModule/services/dataset-form.service.ts +++ b/frontend/src/app/metadataModule/services/dataset-form.service.ts @@ -47,6 +47,10 @@ export class DatasetFormService { validable: true, active: true, modules: [], + /* TODO: verify if following is necessary or even useful: + Note: from a few manual tests with ds form page, it seems useless + */ + cor_objectifs: [], cor_territories: [], cor_dataset_actor: [{ id_nomenclature_actor_role: id_nomenclature }], }; @@ -65,7 +69,6 @@ export class DatasetFormService { keywords: null, terrestrial_domain: null, marine_domain: null, - id_nomenclature_dataset_objectif: [null, Validators.required], id_nomenclature_collecting_method: [null, Validators.required], id_nomenclature_data_origin: [null, Validators.required], id_nomenclature_source_status: [null, Validators.required], @@ -74,6 +77,7 @@ export class DatasetFormService { active: [null, Validators.required], id_taxa_list: null, modules: [[]], + cor_objectifs: [[], Validators.required], cor_territories: [[], Validators.required], cor_dataset_actor: this.fb.array( [], diff --git a/frontend/src/assets/i18n/en.json 
b/frontend/src/assets/i18n/en.json index dc833f9cf9..7c8d3d11a4 100644 --- a/frontend/src/assets/i18n/en.json +++ b/frontend/src/assets/i18n/en.json @@ -164,8 +164,7 @@ "Advanced": "Advanced", "All": "All", - "Aim": "Objective", - "Aims": "Objectives", + "Objective(s)": "Objective(s)", "Objectives": "Objectives", "Title": "Title", "Description": "Description", diff --git a/frontend/src/assets/i18n/fr.json b/frontend/src/assets/i18n/fr.json index ed81f9a9f4..93f76620af 100644 --- a/frontend/src/assets/i18n/fr.json +++ b/frontend/src/assets/i18n/fr.json @@ -164,8 +164,7 @@ "Advanced": "Avancé", "All": "Tous", - "Aim": "Objectif", - "Aims": "Objectifs", + "Objective(s)": "Objectif(s)", "Objectives": "Objectifs", "Title": "Titre", "Description": "Description",