diff --git a/.secrets.baseline b/.secrets.baseline
index 73d4ea1af..4ed962869 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -151,7 +151,7 @@
"filename": "core/settings.py",
"hashed_secret": "1ee34e26aeaf89c64ecc2c85efe6a961b75a50e9",
"is_verified": false,
- "line_number": 214
+ "line_number": 215
}
],
"docker-compose.yml": [
@@ -207,5 +207,5 @@
}
]
},
- "generated_at": "2025-01-06T12:01:37Z"
+ "generated_at": "2025-01-09T17:35:44Z"
}
diff --git a/core/settings.py b/core/settings.py
index 07243aca6..ef7b7ceb4 100644
--- a/core/settings.py
+++ b/core/settings.py
@@ -58,6 +58,7 @@
"core",
"qfdmd",
"qfdmo",
+ "data",
"corsheaders",
]
diff --git a/core/urls.py b/core/urls.py
index 03b434959..fd4de64b9 100644
--- a/core/urls.py
+++ b/core/urls.py
@@ -60,6 +60,7 @@ class PaginatedSitemap(GenericSitemap):
path("dsfr/", include(("dsfr_hacks.urls", "dsfr_hacks"), namespace="dsfr_hacks")),
path("", include(("qfdmo.urls", "qfdmo"), namespace="qfdmo")),
path("", include(("qfdmd.urls", "qfdmd"), namespace="qfdmd")),
+ path("", include(("data.urls", "data"), namespace="data")),
path("docs/", TemplateView.as_view(template_name="techdocs.html"), name="techdocs"),
]
diff --git a/dags/ingest_validated_dataset_to_db.py b/dags/ingest_validated_dataset_to_db.py
index f74130959..d4778d964 100755
--- a/dags/ingest_validated_dataset_to_db.py
+++ b/dags/ingest_validated_dataset_to_db.py
@@ -33,7 +33,7 @@ def _get_first_suggetsioncohorte_to_insert():
hook = PostgresHook(postgres_conn_id="qfdmo_django_db")
row = hook.get_first(
f"""
- SELECT * FROM qfdmo_suggestioncohorte
+ SELECT * FROM data_suggestioncohorte
WHERE statut = '{constants.SUGGESTION_ATRAITER}'
LIMIT 1
"""
@@ -54,7 +54,7 @@ def fetch_and_parse_data(**context):
df_sql = pd.read_sql_query(
f"""
- SELECT * FROM qfdmo_suggestionunitaire
+ SELECT * FROM data_suggestionunitaire
WHERE suggestion_cohorte_id = '{suggestion_cohorte_id}'
""",
engine,
@@ -91,7 +91,7 @@ def fetch_and_parse_data(**context):
normalized_dfs = df_acteur_to_delete["suggestion"].apply(pd.json_normalize)
df_actors_update_actor = pd.concat(normalized_dfs.tolist(), ignore_index=True)
status_repeated = (
- df_acteur_to_delete["status"]
+ df_acteur_to_delete["statut"]
.repeat(df_acteur_to_delete["suggestion"].apply(len))
.reset_index(drop=True)
)
diff --git a/dags/sources/config/airflow_params.py b/dags/sources/config/airflow_params.py
index bb25334bd..f98b03e79 100644
--- a/dags/sources/config/airflow_params.py
+++ b/dags/sources/config/airflow_params.py
@@ -25,6 +25,7 @@
clean_label_codes,
clean_siret_and_siren,
clean_telephone,
+ compute_location,
get_latlng_from_geopoint,
merge_and_clean_souscategorie_codes,
merge_sous_categories_columns,
@@ -63,6 +64,7 @@
"clean_url": clean_url,
"clean_souscategorie_codes_sinoe": clean_souscategorie_codes_sinoe,
"get_latlng_from_geopoint": get_latlng_from_geopoint,
+ "clean_location": compute_location,
}
diff --git a/dags/sources/dags/source_pyreo.py b/dags/sources/dags/source_pyreo.py
index 878012ad1..9ab41dc2c 100755
--- a/dags/sources/dags/source_pyreo.py
+++ b/dags/sources/dags/source_pyreo.py
@@ -80,6 +80,11 @@
# "value": [],
# },
# 4. Transformation du dataframe
+ {
+ "origin": ["latitude", "longitude"],
+ "transformation": "clean_location",
+ "destination": ["location"],
+ },
{
"origin": ["labels_etou_bonus", "acteur_type_code"],
"transformation": "clean_label_codes",
diff --git a/dags/sources/tasks/airflow_logic/db_data_prepare_task.py b/dags/sources/tasks/airflow_logic/db_data_prepare_task.py
index 005a6077a..8bb5093c4 100644
--- a/dags/sources/tasks/airflow_logic/db_data_prepare_task.py
+++ b/dags/sources/tasks/airflow_logic/db_data_prepare_task.py
@@ -28,6 +28,7 @@ def db_data_prepare_wrapper(**kwargs):
df_pssc = kwargs["ti"].xcom_pull(task_ids="propose_services_sous_categories")
df_labels = kwargs["ti"].xcom_pull(task_ids="propose_labels")
df_acteur_services = kwargs["ti"].xcom_pull(task_ids="propose_acteur_services")
+ df_acteurs_from_db = kwargs["ti"].xcom_pull(task_ids="db_read_acteur")
source_id_by_code = read_mapping_from_postgres(table_name="qfdmo_source")
acteurtype_id_by_code = read_mapping_from_postgres(table_name="qfdmo_acteurtype")
@@ -37,6 +38,7 @@ def db_data_prepare_wrapper(**kwargs):
log.preview("df_pssc", df_pssc)
log.preview("df_labels", df_labels)
log.preview("df_acteur_services", df_acteur_services)
+ log.preview("df_acteurs_from_db", df_acteurs_from_db)
log.preview("source_id_by_code", source_id_by_code)
log.preview("acteurtype_id_by_code", acteurtype_id_by_code)
@@ -47,6 +49,7 @@ def db_data_prepare_wrapper(**kwargs):
df_pssc=df_pssc,
df_labels=df_labels,
df_acteur_services=df_acteur_services,
+ df_acteurs_from_db=df_acteurs_from_db,
source_id_by_code=source_id_by_code,
acteurtype_id_by_code=acteurtype_id_by_code,
)
diff --git a/dags/sources/tasks/airflow_logic/db_write_suggestion_task.py b/dags/sources/tasks/airflow_logic/db_write_suggestion_task.py
new file mode 100644
index 000000000..f372d0e0b
--- /dev/null
+++ b/dags/sources/tasks/airflow_logic/db_write_suggestion_task.py
@@ -0,0 +1,49 @@
+import logging
+
+from airflow import DAG
+from airflow.operators.python import PythonOperator
+from sources.tasks.business_logic.db_write_suggestion import db_write_suggestion
+from utils import logging_utils as log
+
+logger = logging.getLogger(__name__)
+
+
+def db_write_suggestion_task(dag: DAG) -> PythonOperator:
+ return PythonOperator(
+ task_id="db_write_suggestion",
+ python_callable=db_write_suggestion_wrapper,
+ dag=dag,
+ )
+
+
+def db_write_suggestion_wrapper(**kwargs) -> None:
+ dag_name = kwargs["dag"].dag_display_name or kwargs["dag"].dag_id
+ run_id = kwargs["run_id"]
+ dfs_acteur = kwargs["ti"].xcom_pull(task_ids="db_data_prepare")
+ df_acteur_to_delete = dfs_acteur["df_acteur_to_delete"]
+ df_acteur_to_create = dfs_acteur["df_acteur_to_create"]
+ df_acteur_to_update = dfs_acteur["df_acteur_to_update"]
+
+ log.preview("dag_name", dag_name)
+ log.preview("run_id", run_id)
+ log.preview("df_acteur_to_delete", df_acteur_to_delete)
+ log.preview("df_acteur_to_create", df_acteur_to_create)
+ log.preview("df_acteur_to_update", df_acteur_to_update)
+
+ if (
+ df_acteur_to_create.empty
+ and df_acteur_to_delete.empty
+ and df_acteur_to_update.empty
+ ):
+ logger.warning("!!! Aucune suggestion à traiter pour cette source !!!")
+ # set the task to airflow skip status
+ kwargs["ti"].xcom_push(key="skip", value=True)
+ return
+
+ return db_write_suggestion(
+ dag_name=dag_name,
+ run_id=run_id,
+ df_acteur_to_create=df_acteur_to_create,
+ df_acteur_to_delete=df_acteur_to_delete,
+ df_acteur_to_update=df_acteur_to_update,
+ )
diff --git a/dags/sources/tasks/airflow_logic/operators.py b/dags/sources/tasks/airflow_logic/operators.py
index d37098470..a17e0052e 100755
--- a/dags/sources/tasks/airflow_logic/operators.py
+++ b/dags/sources/tasks/airflow_logic/operators.py
@@ -2,12 +2,14 @@
from airflow import DAG
from airflow.models.baseoperator import chain
-from shared.tasks.airflow_logic.write_data_task import write_data_task
from sources.tasks.airflow_logic.db_data_prepare_task import db_data_prepare_task
from sources.tasks.airflow_logic.db_read_acteur_task import db_read_acteur_task
from sources.tasks.airflow_logic.db_read_propositions_max_id_task import (
db_read_propositions_max_id_task,
)
+from sources.tasks.airflow_logic.db_write_suggestion_task import (
+ db_write_suggestion_task,
+)
from sources.tasks.airflow_logic.propose_acteur_changes_task import (
propose_acteur_changes_task,
)
@@ -91,5 +93,5 @@ def eo_task_chain(dag: DAG) -> None:
create_tasks,
propose_services_sous_categories_task(dag),
db_data_prepare_task(dag),
- write_data_task(dag),
+ db_write_suggestion_task(dag),
)
diff --git a/dags/sources/tasks/airflow_logic/propose_acteur_changes_task.py b/dags/sources/tasks/airflow_logic/propose_acteur_changes_task.py
index fe9e05d14..68bcf70e1 100644
--- a/dags/sources/tasks/airflow_logic/propose_acteur_changes_task.py
+++ b/dags/sources/tasks/airflow_logic/propose_acteur_changes_task.py
@@ -13,18 +13,13 @@ def propose_acteur_changes_task(dag: DAG) -> PythonOperator:
def propose_acteur_changes_wrapper(**kwargs):
- df = kwargs["ti"].xcom_pull(task_ids="source_data_normalize")
- df_acteurs = kwargs["ti"].xcom_pull(task_ids="db_read_acteur")
+ df_acteur = kwargs["ti"].xcom_pull(task_ids="source_data_normalize")
+ df_acteur_from_db = kwargs["ti"].xcom_pull(task_ids="db_read_acteur")
- params = kwargs["params"]
- column_to_drop = params.get("column_to_drop", [])
-
- log.preview("df (source_data_normalize)", df)
- log.preview("df_acteurs", df_acteurs)
- log.preview("column_to_drop", column_to_drop)
+ log.preview("df (source_data_normalize)", df_acteur)
+ log.preview("df_acteurs", df_acteur_from_db)
return propose_acteur_changes(
- df=df,
- df_acteurs=df_acteurs,
- column_to_drop=column_to_drop,
+ df_acteur=df_acteur,
+ df_acteur_from_db=df_acteur_from_db,
)
diff --git a/dags/sources/tasks/business_logic/db_data_prepare.py b/dags/sources/tasks/business_logic/db_data_prepare.py
index 6d577ef19..98e804a49 100644
--- a/dags/sources/tasks/business_logic/db_data_prepare.py
+++ b/dags/sources/tasks/business_logic/db_data_prepare.py
@@ -14,22 +14,26 @@ def db_data_prepare(
df_pssc: pd.DataFrame,
df_labels: pd.DataFrame,
df_acteur_services: pd.DataFrame,
+ df_acteurs_from_db: pd.DataFrame,
source_id_by_code: dict,
acteurtype_id_by_code: dict,
):
+
update_actors_columns = ["identifiant_unique", "statut", "cree_le"]
df_acteur_to_delete["suggestion"] = df_acteur_to_delete[
update_actors_columns
].apply(lambda row: json.dumps(row.to_dict(), default=str), axis=1)
# Created or updated Acteurs
- df_acteur_services = (
- df_acteur_services
- if df_acteur_services is not None
- else pd.DataFrame(columns=["acteur_id", "acteurservice_id"])
- )
+ # df_acteur_services = (
+ # df_acteur_services
+ # if df_acteur_services is not None
+ # else pd.DataFrame(columns=["acteur_id", "acteurservice_id"])
+ # )
if df_acteur.empty:
- raise ValueError("df_actors est vide")
+ raise ValueError("df_acteur est vide")
+ if df_acteur_services.empty:
+ raise ValueError("df_acteur_services est vide")
if df_ps.empty:
raise ValueError("df_ps est vide")
if df_pssc.empty:
@@ -41,6 +45,8 @@ def db_data_prepare(
acteurtype_id_by_code
)
+ # FIXME: A bouger dans un tache compute_ps qui remplacera propose_services et
+ # propose_services_sous_categories
aggregated_pdsc = (
df_pssc.groupby("propositionservice_id")
.apply(lambda x: x.to_dict("records") if not x.empty else [])
@@ -57,11 +63,9 @@ def db_data_prepare(
df_pds_joined["propositionservice_id"] = df_pds_joined[
"propositionservice_id"
].astype(str)
-
df_pds_joined["pds_sous_categories"] = df_pds_joined["pds_sous_categories"].apply(
lambda x: x if isinstance(x, list) else []
)
-
df_pds_joined.drop("id", axis=1, inplace=True)
aggregated_pds = (
@@ -128,7 +132,20 @@ def db_data_prepare(
lambda row: json.dumps(row.to_dict(), default=str), axis=1
)
df_joined.drop_duplicates("identifiant_unique", keep="first", inplace=True)
- log.preview("df_joined", df_joined)
+
+ df_acteur_to_create = df_joined[
+ ~df_joined["identifiant_unique"].isin(df_acteurs_from_db["identifiant_unique"])
+ ]
+ df_acteur_to_update = df_joined[
+ df_joined["identifiant_unique"].isin(df_acteurs_from_db["identifiant_unique"])
+ ]
+
+ log.preview("df_acteur_to_create", df_acteur_to_create)
+ log.preview("df_acteur_to_update", df_acteur_to_update)
log.preview("df_acteur_to_delete", df_acteur_to_delete)
- return {"all": {"df": df_joined}, "to_disable": {"df": df_acteur_to_delete}}
+ return {
+ "df_acteur_to_create": df_acteur_to_create,
+ "df_acteur_to_update": df_acteur_to_update,
+ "df_acteur_to_delete": df_acteur_to_delete,
+ }
diff --git a/dags/sources/tasks/business_logic/db_write_suggestion.py b/dags/sources/tasks/business_logic/db_write_suggestion.py
new file mode 100644
index 000000000..a1f60e8f4
--- /dev/null
+++ b/dags/sources/tasks/business_logic/db_write_suggestion.py
@@ -0,0 +1,95 @@
+import json
+import logging
+from datetime import datetime
+
+import pandas as pd
+from shared.tasks.database_logic.db_manager import PostgresConnectionManager
+from sources.config import shared_constants as constants
+
+logger = logging.getLogger(__name__)
+
+
+def db_write_suggestion(
+ dag_name: str,
+ run_id: str,
+ df_acteur_to_create: pd.DataFrame,
+ df_acteur_to_delete: pd.DataFrame,
+ df_acteur_to_update: pd.DataFrame,
+):
+
+ metadata = {}
+
+ run_name = run_id.replace("__", " - ")
+
+ insert_suggestion(
+ df=df_acteur_to_create,
+ metadata=metadata,
+ dag_name=f"{dag_name} - AJOUT",
+ run_name=run_name,
+ action_type=constants.SUGGESTION_SOURCE_AJOUT,
+ )
+ insert_suggestion(
+ df=df_acteur_to_delete,
+ metadata=metadata,
+        dag_name=f"{dag_name} - SUPPRESSION",
+ run_name=run_name,
+ action_type=constants.SUGGESTION_SOURCE_SUPRESSION,
+ )
+ insert_suggestion(
+ df=df_acteur_to_update,
+ metadata=metadata,
+ dag_name=f"{dag_name} - MISES A JOUR",
+ run_name=run_name,
+ action_type=constants.SUGGESTION_SOURCE_MISESAJOUR,
+ )
+
+
+def insert_suggestion(
+ df: pd.DataFrame, metadata: dict, dag_name: str, run_name: str, action_type: str
+):
+ if df.empty:
+ return
+ engine = PostgresConnectionManager().engine
+ current_date = datetime.now()
+
+ with engine.connect() as conn:
+ # Insert a new suggestion
+ result = conn.execute(
+ """
+ INSERT INTO data_suggestioncohorte
+ (
+ identifiant_action,
+ identifiant_execution,
+ type_action,
+ statut,
+ metadata,
+ cree_le,
+ modifie_le
+ )
+ VALUES (%s, %s, %s, %s, %s, %s, %s)
+ RETURNING ID;
+ """,
+ (
+ dag_name,
+ run_name,
+ action_type, # FIXME: spécialiser les sources
+ constants.SUGGESTION_AVALIDER,
+ json.dumps(metadata),
+ current_date,
+ current_date,
+ ),
+ )
+ suggestion_cohorte_id = result.fetchone()[0]
+
+ # Insert dag_run_change
+ df["type_action"] = action_type
+ df["suggestion_cohorte_id"] = suggestion_cohorte_id
+ df["statut"] = constants.SUGGESTION_AVALIDER
+ df[["suggestion", "suggestion_cohorte_id", "type_action", "statut"]].to_sql(
+ "data_suggestionunitaire",
+ engine,
+ if_exists="append",
+ index=False,
+ method="multi",
+ chunksize=1000,
+ )
diff --git a/dags/sources/tasks/business_logic/propose_acteur_changes.py b/dags/sources/tasks/business_logic/propose_acteur_changes.py
index 4171c9942..4e459d51d 100644
--- a/dags/sources/tasks/business_logic/propose_acteur_changes.py
+++ b/dags/sources/tasks/business_logic/propose_acteur_changes.py
@@ -3,58 +3,41 @@
import numpy as np
import pandas as pd
-from utils.base_utils import transform_location
-from utils.mapping_utils import parse_float
logger = logging.getLogger(__name__)
def propose_acteur_changes(
- df: pd.DataFrame,
- df_acteurs: pd.DataFrame,
- column_to_drop: list = [],
+ df_acteur: pd.DataFrame,
+ df_acteur_from_db: pd.DataFrame,
):
-
- # TODO: à déplacer dans la source_data_normalize
- # intersection of columns in df and column_to_drop
- column_to_drop = list(set(column_to_drop) & set(df.columns))
- df = df.drop(column_to_drop, axis=1)
-
- if "latitude" in df.columns and "longitude" in df.columns:
- df["latitude"] = df["latitude"].apply(parse_float)
- df["longitude"] = df["longitude"].apply(parse_float)
- df["location"] = df.apply(
- lambda row: transform_location(row["longitude"], row["latitude"]),
- axis=1,
- )
-
# On garde le cree_le de qfdmo_acteur
- df.drop(columns=["cree_le"], inplace=True, errors="ignore")
- df = df.merge(
- df_acteurs[["identifiant_unique", "cree_le"]],
+ df_acteur.drop(columns=["cree_le"], inplace=True, errors="ignore")
+ df_acteur = df_acteur.merge(
+ df_acteur_from_db[["identifiant_unique", "cree_le"]],
on="identifiant_unique",
how="left",
)
- df["cree_le"] = df["cree_le"].fillna(datetime.now())
+ df_acteur["cree_le"] = df_acteur["cree_le"].fillna(datetime.now())
# On met à jour le modifie_le de qfdmo_acteur
- df["modifie_le"] = datetime.now()
+ df_acteur["modifie_le"] = datetime.now()
- df = df.replace({np.nan: None})
+ df_acteur = df_acteur.replace({np.nan: None})
- duplicates_mask = df.duplicated("identifiant_unique", keep=False)
- duplicate_ids = df.loc[duplicates_mask, "identifiant_unique"].unique()
+ duplicates_mask = df_acteur.duplicated("identifiant_unique", keep=False)
+ duplicate_ids = df_acteur.loc[duplicates_mask, "identifiant_unique"].unique()
number_of_duplicates = len(duplicate_ids)
metadata = {
"number_of_duplicates": number_of_duplicates,
"duplicate_ids": list(duplicate_ids),
- "acteurs_to_add_or_update": len(df),
+ "acteurs_to_add_or_update": len(df_acteur),
}
- df = df.drop_duplicates(subset="identifiant_unique", keep="first")
- df["event"] = "CREATE"
+ df_acteur = df_acteur.drop_duplicates(subset="identifiant_unique", keep="first")
+ df_acteur["event"] = "CREATE"
return {
- "df": df,
+ "df": df_acteur,
"metadata": metadata,
}
diff --git a/dags/sources/tasks/business_logic/propose_acteur_to_delete.py b/dags/sources/tasks/business_logic/propose_acteur_to_delete.py
index 64a667c2c..c8e800f99 100644
--- a/dags/sources/tasks/business_logic/propose_acteur_to_delete.py
+++ b/dags/sources/tasks/business_logic/propose_acteur_to_delete.py
@@ -24,6 +24,10 @@ def propose_acteur_to_delete(
df_acteur_to_delete["statut"] = "SUPPRIME"
df_acteur_to_delete["event"] = "UPDATE_ACTOR"
+ # FIXME: ajouter le contexte de la suppression
+ # ajouter une colonne context avec le contenu de df_acteurs_for_db en json pour
+ # chaque colonne en jonction sur identifiant_unique
+
return {
"metadata": {"number_of_removed_actors": len(df_acteur_to_delete)},
"df_acteur_to_delete": df_acteur_to_delete,
diff --git a/dags/sources/tasks/business_logic/source_data_normalize.py b/dags/sources/tasks/business_logic/source_data_normalize.py
index 8c4faa99c..7194ffcf7 100755
--- a/dags/sources/tasks/business_logic/source_data_normalize.py
+++ b/dags/sources/tasks/business_logic/source_data_normalize.py
@@ -187,11 +187,7 @@ def source_data_normalize(
# TODO: Remplacer par le dag_id
if dag_id == "sinoe":
- df = df_normalize_sinoe(
- df,
- product_mapping=dag_config.product_mapping,
- dechet_mapping=dag_config.dechet_mapping,
- )
+ df = df_normalize_sinoe(df)
# Merge et suppression des lignes indésirables
df = _remove_undesired_lines(df, dag_config)
@@ -201,21 +197,6 @@ def source_data_normalize(
raise ValueError("Plus aucune donnée disponible après normalisation")
return df
- # # TODO: Je n'ai pas vu la source qui applique cette règle
- # if "statut" in df.columns:
- # df["statut"] = df["statut"].map(
- # {
- # 1: constants.ACTEUR_ACTIF,
- # 0: constants.ACTEUR_SUPPRIME,
- # constants.ACTEUR_ACTIF: constants.ACTEUR_ACTIF,
- # "INACTIF": constants.ACTEUR_INACTIF,
- # "SUPPRIME": constants.ACTEUR_SUPPRIME,
- # }
- # )
- # df["statut"] = df["statut"].fillna(constants.ACTEUR_ACTIF)
- # else:
- # df["statut"] = constants.ACTEUR_ACTIF
-
def df_normalize_pharmacie(df: pd.DataFrame) -> pd.DataFrame:
# FIXME : à déplacer dans une fonction df ?
@@ -234,8 +215,6 @@ def df_normalize_pharmacie(df: pd.DataFrame) -> pd.DataFrame:
def df_normalize_sinoe(
df: pd.DataFrame,
- product_mapping: dict,
- dechet_mapping: dict,
) -> pd.DataFrame:
# DOUBLONS: extra sécurité: même si on ne devrait pas obtenir
diff --git a/dags/sources/tasks/transform/transform_df.py b/dags/sources/tasks/transform/transform_df.py
index 2bbe93441..507d144d5 100644
--- a/dags/sources/tasks/transform/transform_df.py
+++ b/dags/sources/tasks/transform/transform_df.py
@@ -11,7 +11,9 @@
clean_siren,
clean_siret,
)
+from utils.base_utils import transform_location
from utils.formatter import format_libelle_to_code
+from utils.mapping_utils import parse_float
logger = logging.getLogger(__name__)
@@ -218,6 +220,17 @@ def get_latlng_from_geopoint(row: pd.Series, _) -> pd.Series:
return row[["latitude", "longitude"]]
+def compute_location(row: pd.Series, _):
+ # FIXME : tests à déplacer
+ # first column is latitude, second is longitude
+ lat_column = row.keys()[0]
+ lng_column = row.keys()[1]
+ row[lat_column] = parse_float(row[lat_column])
+ row[lng_column] = parse_float(row[lng_column])
+ row["location"] = transform_location(row[lng_column], row[lat_column])
+ return row[["location"]]
+
+
### Fonctions de résolution de l'adresse au format BAN et avec vérification via l'API
# adresse.data.gouv.fr en option
# TODO : A déplacer ?
diff --git a/dags/utils/dag_eo_utils.py b/dags/utils/dag_eo_utils.py
index 6c5b5b090..f55c1dcba 100755
--- a/dags/utils/dag_eo_utils.py
+++ b/dags/utils/dag_eo_utils.py
@@ -13,16 +13,11 @@ def insert_suggestion_and_process_df(df_acteur_updates, metadata, dag_name, run_
return
engine = PostgresConnectionManager().engine
current_date = datetime.now()
- logger.warning(dag_name)
- logger.warning(run_name)
- logger.warning(constants.SUGGESTION_SOURCE)
- logger.warning(constants.SUGGESTION_ATRAITER)
- logger.warning(json.dumps(metadata))
with engine.connect() as conn:
# Insert a new suggestion
result = conn.execute(
"""
- INSERT INTO qfdmo_suggestioncohorte
+ INSERT INTO data_suggestioncohorte
(
identifiant_action,
identifiant_execution,
@@ -39,7 +34,7 @@ def insert_suggestion_and_process_df(df_acteur_updates, metadata, dag_name, run_
dag_name,
run_name,
constants.SUGGESTION_SOURCE, # FIXME: spécialiser les sources
- constants.SUGGESTION_ATRAITER,
+ constants.SUGGESTION_AVALIDER,
json.dumps(metadata),
current_date,
current_date,
@@ -50,11 +45,11 @@ def insert_suggestion_and_process_df(df_acteur_updates, metadata, dag_name, run_
# Insert dag_run_change
df_acteur_updates["type_action"] = df_acteur_updates["event"]
df_acteur_updates["suggestion_cohorte_id"] = suggestion_cohorte_id
- df_acteur_updates["statut"] = constants.SUGGESTION_ATRAITER
+ df_acteur_updates["statut"] = constants.SUGGESTION_AVALIDER
df_acteur_updates[
["suggestion", "suggestion_cohorte_id", "type_action", "statut"]
].to_sql(
- "qfdmo_suggestionunitaire",
+ "data_suggestionunitaire",
engine,
if_exists="append",
index=False,
diff --git a/dags/utils/dag_ingest_validated_utils.py b/dags/utils/dag_ingest_validated_utils.py
index 7bdc2126c..db24171ab 100755
--- a/dags/utils/dag_ingest_validated_utils.py
+++ b/dags/utils/dag_ingest_validated_utils.py
@@ -315,6 +315,6 @@ def update_dag_run_status(
connection, dag_run_id, statut=shared_constants.SUGGESTION_SUCCES
):
query = f"""
- UPDATE qfdmo_suggestioncohorte SET statut = '{statut}' WHERE id = {dag_run_id}
+ UPDATE data_suggestioncohorte SET statut = '{statut}' WHERE id = {dag_run_id}
"""
connection.execute(query)
diff --git a/dags_unit_tests/sources/tasks/business_logic/test_propose_acteur_changes.py b/dags_unit_tests/sources/tasks/business_logic/test_propose_acteur_changes.py
index 5ec9808c7..87dc5c50b 100644
--- a/dags_unit_tests/sources/tasks/business_logic/test_propose_acteur_changes.py
+++ b/dags_unit_tests/sources/tasks/business_logic/test_propose_acteur_changes.py
@@ -61,8 +61,8 @@ def test_create_actors_cree_le(
expected_cree_le,
):
result = propose_acteur_changes(
- df=df_data_from_api,
- df_acteurs=df_acteur,
+ df_acteur=df_data_from_api,
+ df_acteur_from_db=df_acteur,
)
df_result = result["df"]
@@ -87,14 +87,14 @@ def test_create_actors_location(
longitude,
):
result = propose_acteur_changes(
- df=pd.DataFrame(
+ df_acteur=pd.DataFrame(
{
"identifiant_unique": ["1"],
"latitude": [latitude],
"longitude": [longitude],
}
),
- df_acteurs=df_empty_acteurs_from_db,
+ df_acteur_from_db=df_empty_acteurs_from_db,
)
df_result = result["df"]
diff --git a/data/__init__.py b/data/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/qfdmo/admin/data.py b/data/admin.py
similarity index 82%
rename from qfdmo/admin/data.py
rename to data/admin.py
index 14a7d9870..11b18b4dd 100644
--- a/qfdmo/admin/data.py
+++ b/data/admin.py
@@ -1,6 +1,6 @@
from django.contrib.gis import admin
-from qfdmo.models import SuggestionCohorte, SuggestionUnitaire
+from data.models import SuggestionCohorte, SuggestionUnitaire
class SuggestionCohorteAdmin(admin.ModelAdmin):
diff --git a/data/apps.py b/data/apps.py
new file mode 100644
index 000000000..b882be950
--- /dev/null
+++ b/data/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class DataConfig(AppConfig):
+ default_auto_field = "django.db.models.BigAutoField"
+ name = "data"
diff --git a/data/forms.py b/data/forms.py
new file mode 100644
index 000000000..9ab32ea91
--- /dev/null
+++ b/data/forms.py
@@ -0,0 +1,18 @@
+from django import forms
+
+from data.models import SuggestionCohorte, SuggestionStatut
+
+
+class SuggestionCohorteForm(forms.Form):
+ suggestion_cohorte = forms.ModelChoiceField(
+        label="Sélectionner l'exécution d'un DAG",
+ widget=forms.Select(
+ attrs={
+ "class": "fr-select",
+ }
+ ),
+ queryset=SuggestionCohorte.objects.filter(
+ statut=SuggestionStatut.AVALIDER.value
+ ),
+ required=True,
+ )
diff --git a/data/migrations/0001_initial.py b/data/migrations/0001_initial.py
new file mode 100644
index 000000000..e12402e22
--- /dev/null
+++ b/data/migrations/0001_initial.py
@@ -0,0 +1,225 @@
+# Generated by Django 5.1.4 on 2025-01-09 14:04
+
+import django.contrib.gis.db.models.fields
+import django.core.validators
+import django.db.models.deletion
+import django.db.models.functions.datetime
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = []
+
+ operations = [
+ migrations.CreateModel(
+ name="BANCache",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("adresse", models.CharField(blank=True, max_length=255, null=True)),
+ (
+ "code_postal",
+ models.CharField(blank=True, max_length=255, null=True),
+ ),
+ ("ville", models.CharField(blank=True, max_length=255, null=True)),
+ (
+ "location",
+ django.contrib.gis.db.models.fields.PointField(
+ blank=True, null=True, srid=4326
+ ),
+ ),
+ ("ban_returned", models.JSONField(blank=True, null=True)),
+ (
+ "modifie_le",
+ models.DateTimeField(
+ auto_now=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ ),
+ ),
+ ],
+ options={
+ "verbose_name": "Cache BAN",
+ "verbose_name_plural": "Cache BAN",
+ },
+ ),
+ migrations.CreateModel(
+ name="SuggestionCohorte",
+ fields=[
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "identifiant_action",
+ models.CharField(
+ help_text="Identifiant de l'action (ex : dag_id pour Airflow)",
+ max_length=250,
+ ),
+ ),
+ (
+ "identifiant_execution",
+ models.CharField(
+ help_text="Identifiant de l'execution (ex : run_id pour Airflow)",
+ max_length=250,
+ ),
+ ),
+ (
+ "type_action",
+ models.CharField(
+ blank=True,
+ choices=[
+ ("CLUSTERING", "regroupement/déduplication des acteurs"),
+ ("SOURCE", "ingestion de source de données"),
+ (
+ "SOURCE_AJOUT",
+ "ingestion de source de données - nouveau acteur",
+ ),
+ (
+ "SOURCE_MISESAJOUR",
+ "ingestion de source de données - modification d'acteur existant",
+ ),
+ ("SOURCE_SUPRESSION", "ingestion de source de données"),
+ ("ENRICHISSEMENT", "suggestion d'enrichissement"),
+ ],
+ max_length=250,
+ ),
+ ),
+ (
+ "statut",
+ models.CharField(
+ choices=[
+ ("AVALIDER", "À valider"),
+ ("REJETER", "Rejeter"),
+ ("ATRAITER", "À traiter"),
+ ("ENCOURS", "En cours de traitement"),
+ ("ERREUR", "Fini en erreur"),
+ ("PARTIEL", "Fini avec succès partiel"),
+ ("SUCCES", "Fini avec succès"),
+ ],
+ default="AVALIDER",
+ max_length=50,
+ ),
+ ),
+ (
+ "metadata",
+ models.JSONField(
+ blank=True,
+ help_text="Metadata de la cohorte, données statistiques",
+ null=True,
+ ),
+ ),
+ (
+ "pourcentage_erreurs_tolerees",
+ models.IntegerField(
+ db_default=0,
+ default=0,
+ help_text="Nombre d'erreurs tolérées en pourcentage",
+ validators=[
+ django.core.validators.MinValueValidator(0),
+ django.core.validators.MaxValueValidator(100),
+ ],
+ ),
+ ),
+ (
+ "cree_le",
+ models.DateTimeField(
+ auto_now_add=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ ),
+ ),
+ (
+ "modifie_le",
+ models.DateTimeField(
+ auto_now=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ ),
+ ),
+ ],
+ ),
+ migrations.CreateModel(
+ name="SuggestionUnitaire",
+ fields=[
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "type_action",
+ models.CharField(
+ blank=True,
+ choices=[
+ ("CLUSTERING", "regroupement/déduplication des acteurs"),
+ ("SOURCE", "ingestion de source de données"),
+ (
+ "SOURCE_AJOUT",
+ "ingestion de source de données - nouveau acteur",
+ ),
+ (
+ "SOURCE_MISESAJOUR",
+ "ingestion de source de données - modification d'acteur existant",
+ ),
+ ("SOURCE_SUPRESSION", "ingestion de source de données"),
+ ("ENRICHISSEMENT", "suggestion d'enrichissement"),
+ ],
+ max_length=250,
+ ),
+ ),
+ (
+ "statut",
+ models.CharField(
+ choices=[
+ ("AVALIDER", "À valider"),
+ ("REJETER", "Rejeter"),
+ ("ATRAITER", "À traiter"),
+ ("ENCOURS", "En cours de traitement"),
+ ("ERREUR", "Fini en erreur"),
+ ("PARTIEL", "Fini avec succès partiel"),
+ ("SUCCES", "Fini avec succès"),
+ ],
+ default="AVALIDER",
+ max_length=50,
+ ),
+ ),
+ (
+ "context",
+ models.JSONField(
+ blank=True,
+ help_text="Contexte de la suggestion : données initiales",
+ null=True,
+ ),
+ ),
+ (
+ "suggestion",
+ models.JSONField(
+ blank=True, help_text="Suggestion de modification"
+ ),
+ ),
+ (
+ "cree_le",
+ models.DateTimeField(
+ auto_now_add=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ ),
+ ),
+ (
+ "modifie_le",
+ models.DateTimeField(
+ auto_now=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ ),
+ ),
+ (
+ "suggestion_cohorte",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="suggestion_unitaires",
+ to="data.suggestioncohorte",
+ ),
+ ),
+ ],
+ ),
+ ]
diff --git a/data/migrations/__init__.py b/data/migrations/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/qfdmo/models/data.py b/data/models.py
similarity index 94%
rename from qfdmo/models/data.py
rename to data/models.py
index a13448a4d..61c6e4abb 100644
--- a/qfdmo/models/data.py
+++ b/data/models.py
@@ -1,4 +1,5 @@
from django.contrib.gis.db import models
+from django.core.validators import MaxValueValidator, MinValueValidator
from django.db.models.functions import Now
from dags.sources.config.shared_constants import (
@@ -6,6 +7,7 @@
SUGGESTION_AVALIDER,
SUGGESTION_CLUSTERING,
SUGGESTION_ENCOURS,
+ SUGGESTION_ENRICHISSEMENT,
SUGGESTION_ERREUR,
SUGGESTION_PARTIEL,
SUGGESTION_REJETER,
@@ -43,8 +45,7 @@ class SuggestionAction(models.TextChoices):
"ingestion de source de données - modification d'acteur existant",
)
SOURCE_SUPPRESSION = SUGGESTION_SOURCE_SUPRESSION, "ingestion de source de données"
- # A venir
- # ENRICHISSEMENT…
+ SOURCE_ENRICHISSEMENT = SUGGESTION_ENRICHISSEMENT, "suggestion d'enrichissement"
class SuggestionCohorte(models.Model):
@@ -71,6 +72,12 @@ class SuggestionCohorte(models.Model):
metadata = models.JSONField(
null=True, blank=True, help_text="Metadata de la cohorte, données statistiques"
)
+ pourcentage_erreurs_tolerees = models.IntegerField(
+ default=0,
+ db_default=0,
+ help_text="Nombre d'erreurs tolérées en pourcentage",
+ validators=[MinValueValidator(0), MaxValueValidator(100)],
+ )
cree_le = models.DateTimeField(auto_now_add=True, db_default=Now())
modifie_le = models.DateTimeField(auto_now=True, db_default=Now())
diff --git a/data/urls.py b/data/urls.py
new file mode 100644
index 000000000..7a81e6b91
--- /dev/null
+++ b/data/urls.py
@@ -0,0 +1,11 @@
+from django.urls import path
+
+from data.views import DagsValidation
+
+urlpatterns = [
+ path(
+ "dags/validations",
+ DagsValidation.as_view(),
+ name="dags_validations",
+ ),
+]
diff --git a/data/views.py b/data/views.py
new file mode 100644
index 000000000..ec2dee9e3
--- /dev/null
+++ b/data/views.py
@@ -0,0 +1,175 @@
+from django.contrib import messages
+from django.contrib.auth.mixins import LoginRequiredMixin
+from django.shortcuts import render
+from django.views.generic.edit import FormView
+
+from data.forms import SuggestionCohorteForm
+from data.models import SuggestionAction, SuggestionStatut
+
+
+class IsStaffMixin(LoginRequiredMixin):
+ def dispatch(self, request, *args, **kwargs):
+ if not request.user.is_staff:
+ return self.handle_no_permission()
+ return super().dispatch(request, *args, **kwargs)
+
+
+ACTION_TO_VERB = {
+ SuggestionAction.SOURCE_AJOUT: "ajoutera",
+ SuggestionAction.SOURCE_SUPPRESSION: "supprimera",
+ SuggestionAction.SOURCE_MISESAJOUR: "mettra à jour",
+}
+
+
+class DagsValidation(IsStaffMixin, FormView):
+ form_class = SuggestionCohorteForm
+ template_name = "data/dags_validations.html"
+ success_url = "/dags/validations"
+
+ def form_valid(self, form):
+ # MANAGE search and display suggestion_cohorte details
+ suggestion_cohorte = form.cleaned_data["suggestion_cohorte"]
+ if self.request.POST.get("search"):
+ context = {"form": form}
+ context["suggestion_cohorte_instance"] = suggestion_cohorte
+ suggestion_unitaires = suggestion_cohorte.suggestion_unitaires.all()
+ context["metadata"] = {
+ "nb_suggestions": suggestion_unitaires.count(),
+ "description": (
+ "La validation de cette cohorte de suggestion "
+ f"{ACTION_TO_VERB[suggestion_cohorte.type_action]} l'ensemble des "
+ "acteurs"
+ ),
+ "source": suggestion_cohorte.identifiant_action,
+ }
+ suggestion_unitaires = suggestion_unitaires.order_by("?")[:100]
+ context["suggestion_unitaires"] = suggestion_unitaires
+ return render(self.request, self.template_name, context)
+ # ELSE: update the status of the suggestion_cohorte and its
+ # suggestion_cohortelines
+ suggestion_cohorte = form.cleaned_data["suggestion_cohorte"]
+ new_status = (
+ SuggestionStatut.ATRAITER.value
+ if self.request.POST.get("dag_valid") == "1"
+ else SuggestionStatut.REJETER.value
+ )
+
+ suggestion_cohorte.suggestion_unitaires.all().update(statut=new_status)
+ suggestion_cohorte.statut = new_status
+ suggestion_cohorte.save()
+
+ messages.success(
+ self.request,
+ f"La cohorte {suggestion_cohorte} a été mise à jour avec le "
+ f"statut {new_status}",
+ )
+
+ return super().form_valid(form)
+
+ def form_invalid(self, form):
+ messages.error(self.request, "Il y a des erreurs dans le formulaire.")
+ return super().form_invalid(form)
+
+
+# class DagsValidationDeprecated(IsStaffMixin, FormView):
+# form_class = SuggestionCohorteForm
+# template_name = "qfdmo/dags_validations.html"
+# success_url = "/dags/validations"
+
+# def get_initial(self):
+# initial = super().get_initial()
+# initial["suggestion_cohorte"] = self.request.GET.get("suggestion_cohorte")
+# return initial
+
+# def post(self, request, *args, **kwargs):
+
+# dag_valid = request.POST.get("dag_valid")
+# if dag_valid in ["1", "0"]:
+# return self.form_valid(self.get_form())
+# else:
+# suggestion_cohorte_obj = SuggestionCohorte.objects.get(
+# pk=request.POST.get("suggestion_cohorte")
+# )
+# id = request.POST.get("id")
+# suggestion_unitaire = suggestion_cohorte_obj.suggestion_unitaires.filter(
+# id=id
+# ).first()
+# identifiant_unique = request.POST.get("identifiant_unique")
+# index = request.POST.get("index")
+# action = request.POST.get("action")
+
+# if action == "validate":
+# suggestion_unitaire.update_row_update_candidate(
+# SuggestionStatut.ATRAITER.value, index
+# )
+# elif action == "reject":
+# suggestion_unitaire.update_row_update_candidate(
+# SuggestionStatut.REJETER.value, index
+# )
+
+# updated_candidat = suggestion_unitaire.get_candidat(index)
+
+# return render(
+# request,
+# "qfdmo/partials/candidat_row.html",
+# {
+# "identifiant_unique": identifiant_unique,
+# "candidat": updated_candidat,
+# "index": index,
+# "request": request,
+# "suggestion_cohorte": request.POST.get("suggestion_cohorte"),
+# "suggestion_unitaire": suggestion_unitaire,
+# },
+# )
+
+# def get_context_data(self, **kwargs):
+# context = super().get_context_data(**kwargs)
+# if self.request.GET.get("suggestion_cohorte"):
+# suggestion_cohorte = SuggestionCohorte.objects.get(
+# pk=self.request.GET.get("suggestion_cohorte")
+# )
+# context["suggestion_cohorte_instance"] = suggestion_cohorte
+# suggestion_unitaires = (
+# suggestion_cohorte.suggestion_unitaires.all().order_by("?")[:100]
+# )
+# context["suggestion_unitaires"] = suggestion_unitaires
+
+# if (
+# suggestion_unitaires
+# and suggestion_unitaires[0].change_type == "UPDATE_ACTOR"
+# ):
+# # Pagination
+# suggestion_unitaires = (
+# suggestion_cohorte.suggestion_unitaires.all().order_by("id")
+# )
+# paginator = Paginator(suggestion_unitaires, 100)
+# page_number = self.request.GET.get("page")
+# page_obj = paginator.get_page(page_number)
+# context["suggestion_unitaires"] = page_obj
+
+# return context
+
+# def form_valid(self, form):
+# if not form.is_valid():
+# raise ValueError("Form is not valid")
+# suggestion_cohorte_id = form.cleaned_data["suggestion_cohorte"].id
+# suggestion_cohorte_obj = (
+# SuggestionCohorte.objects.get(pk=suggestion_cohorte_id)
+# )
+# new_status = (
+# SuggestionStatut.ATRAITER.value
+# if self.request.POST.get("dag_valid") == "1"
+# else SuggestionStatut.REJETER.value
+# )
+
+# # FIXME: I am not sure we need the filter here
+# suggestion_cohorte_obj.suggestion_unitaires.filter(
+# status=SuggestionStatut.AVALIDER.value
+# ).update(status=new_status)
+
+# logging.info(f"{suggestion_cohorte_id} - {self.request.user}")
+
+# suggestion_cohorte_obj.statut = new_status
+# suggestion_cohorte_obj.save()
+
+# return super().form_valid(form)
diff --git a/qfdmo/admin/__init__.py b/qfdmo/admin/__init__.py
index 627508b0b..983bf9b07 100644
--- a/qfdmo/admin/__init__.py
+++ b/qfdmo/admin/__init__.py
@@ -1,4 +1,3 @@
from .acteur import * # noqa
from .action import * # noqa
from .categorie_objet import * # noqa
-from .data import * # noqa
diff --git a/qfdmo/forms.py b/qfdmo/forms.py
index a4c2ba924..56e38568d 100644
--- a/qfdmo/forms.py
+++ b/qfdmo/forms.py
@@ -9,7 +9,7 @@
from qfdmo.fields import GroupeActionChoiceField
from qfdmo.geo_api import epcis_from, formatted_epcis_as_list_of_tuple
-from qfdmo.models import SousCategorieObjet, SuggestionCohorte
+from qfdmo.models import SousCategorieObjet
from qfdmo.models.action import (
Action,
GroupeAction,
@@ -17,7 +17,6 @@
get_directions,
get_ordered_directions,
)
-from qfdmo.models.data import SuggestionStatut
from qfdmo.widgets import (
AutoCompleteInput,
DSFRCheckboxSelectMultiple,
@@ -342,21 +341,6 @@ def load_choices(
)
-class DagsForm(forms.Form):
- suggestion_cohorte = forms.ModelChoiceField(
- label="Séléctionner l'execution d'un DAG",
- widget=forms.Select(
- attrs={
- "class": "fr-select",
- }
- ),
- queryset=SuggestionCohorte.objects.filter(
- statut=SuggestionStatut.ATRAITER.value
- ),
- required=True,
- )
-
-
class ConfiguratorForm(DsfrBaseForm):
# TODO: rename this field in all codebase -> actions_displayed
action_list = GroupeActionChoiceField(
diff --git a/qfdmo/management/commands/reinitialize_dagrun.py b/qfdmo/management/commands/reinitialize_dagrun.py
index 6560a8f5f..1dcfcac0b 100644
--- a/qfdmo/management/commands/reinitialize_dagrun.py
+++ b/qfdmo/management/commands/reinitialize_dagrun.py
@@ -7,13 +7,13 @@ class Command(BaseCommand):
def handle(self, *args, **options):
with connection.cursor() as cursor:
- # Truncate the table qfdmo_suggestioncohorte and qfdmo_suggestionunitaire
- cursor.execute("TRUNCATE TABLE qfdmo_suggestioncohorte CASCADE")
+ # Truncate the table data_suggestioncohorte and data_suggestionunitaire
+ cursor.execute("TRUNCATE TABLE data_suggestioncohorte CASCADE")
# Set auto-increment to 1
cursor.execute(
- "ALTER SEQUENCE qfdmo_suggestioncohorte_id_seq RESTART WITH 1"
+ "ALTER SEQUENCE data_suggestioncohorte_id_seq RESTART WITH 1"
)
cursor.execute(
- "ALTER SEQUENCE qfdmo_suggestionunitaire_id_seq RESTART WITH 1"
+ "ALTER SEQUENCE data_suggestionunitaire_id_seq RESTART WITH 1"
)
diff --git a/qfdmo/migrations/0110_suggestioncohorte_pourcentage_erreurs_tolerees_and_more.py b/qfdmo/migrations/0110_suggestioncohorte_pourcentage_erreurs_tolerees_and_more.py
new file mode 100644
index 000000000..9ca12f647
--- /dev/null
+++ b/qfdmo/migrations/0110_suggestioncohorte_pourcentage_erreurs_tolerees_and_more.py
@@ -0,0 +1,64 @@
+# Generated by Django 5.1.4 on 2025-01-09 12:38
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("qfdmo", "0109_suggestioncohorte_remove_dagrunchange_dag_run_and_more"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="suggestioncohorte",
+ name="pourcentage_erreurs_tolerees",
+ field=models.IntegerField(
+ default=0,
+ help_text="Nombre d'erreurs tolérées en pourcentage",
+ validators=[
+ django.core.validators.MinValueValidator(0),
+ django.core.validators.MaxValueValidator(100),
+ ],
+ ),
+ ),
+ migrations.AlterField(
+ model_name="suggestioncohorte",
+ name="type_action",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("CLUSTERING", "regroupement/déduplication des acteurs"),
+ ("SOURCE", "ingestion de source de données"),
+ ("SOURCE_AJOUT", "ingestion de source de données - nouveau acteur"),
+ (
+ "SOURCE_MISESAJOUR",
+ "ingestion de source de données - modification d'acteur existant",
+ ),
+ ("SOURCE_SUPRESSION", "ingestion de source de données"),
+ ("ENRICHISSEMENT", "suggestion d'enrichissement"),
+ ],
+ max_length=250,
+ ),
+ ),
+ migrations.AlterField(
+ model_name="suggestionunitaire",
+ name="type_action",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("CLUSTERING", "regroupement/déduplication des acteurs"),
+ ("SOURCE", "ingestion de source de données"),
+ ("SOURCE_AJOUT", "ingestion de source de données - nouveau acteur"),
+ (
+ "SOURCE_MISESAJOUR",
+ "ingestion de source de données - modification d'acteur existant",
+ ),
+ ("SOURCE_SUPRESSION", "ingestion de source de données"),
+ ("ENRICHISSEMENT", "suggestion d'enrichissement"),
+ ],
+ max_length=250,
+ ),
+ ),
+ ]
diff --git a/qfdmo/migrations/0111_delete_bancache_and_more.py b/qfdmo/migrations/0111_delete_bancache_and_more.py
new file mode 100644
index 000000000..a0d8d291e
--- /dev/null
+++ b/qfdmo/migrations/0111_delete_bancache_and_more.py
@@ -0,0 +1,26 @@
+# Generated by Django 5.1.4 on 2025-01-09 13:50
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("qfdmo", "0110_suggestioncohorte_pourcentage_erreurs_tolerees_and_more"),
+ ]
+
+ operations = [
+ migrations.DeleteModel(
+ name="BANCache",
+ ),
+ migrations.RemoveField(
+ model_name="suggestionunitaire",
+ name="suggestion_cohorte",
+ ),
+ migrations.DeleteModel(
+ name="SuggestionCohorte",
+ ),
+ migrations.DeleteModel(
+ name="SuggestionUnitaire",
+ ),
+ ]
diff --git a/qfdmo/models/__init__.py b/qfdmo/models/__init__.py
index 23b9c02d5..80f4d9eba 100644
--- a/qfdmo/models/__init__.py
+++ b/qfdmo/models/__init__.py
@@ -1,5 +1,4 @@
from .acteur import * # noqa
from .action import * # noqa
from .categorie_objet import * # noqa
-from .data import * # noqa
from .utils import * # noqa
diff --git a/qfdmo/urls.py b/qfdmo/urls.py
index fa903e306..0c99afa07 100644
--- a/qfdmo/urls.py
+++ b/qfdmo/urls.py
@@ -16,7 +16,6 @@
)
from qfdmo.views.auth import LVAOLoginView
from qfdmo.views.configurator import AdvancedConfiguratorView, ConfiguratorView
-from qfdmo.views.dags import DagsValidation
urlpatterns = [
path("", direct_access, name="direct_access"),
@@ -87,11 +86,6 @@
TemplateView.as_view(template_name="tests/iframe.html"),
name="test_iframe",
),
- path(
- "dags/validations",
- DagsValidation.as_view(),
- name="dags_validations",
- ),
path(
"configurateur",
ConfiguratorView.as_view(),
diff --git a/qfdmo/views/dags.py b/qfdmo/views/dags.py
deleted file mode 100644
index 926cf4ddd..000000000
--- a/qfdmo/views/dags.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import logging
-
-from django.contrib import messages
-from django.contrib.auth.mixins import LoginRequiredMixin
-from django.core.paginator import Paginator
-from django.shortcuts import render
-from django.views.generic.edit import FormView
-
-from qfdmo.forms import DagsForm
-from qfdmo.models.data import SuggestionCohorte, SuggestionStatut
-
-
-class IsStaffMixin(LoginRequiredMixin):
- def dispatch(self, request, *args, **kwargs):
- if not request.user.is_staff:
- return self.handle_no_permission()
- return super().dispatch(request, *args, **kwargs)
-
-
-class DagsValidation(IsStaffMixin, FormView):
- form_class = DagsForm
- template_name = "data/dags_validations.html"
- success_url = "/dags/validations"
-
- def form_valid(self, form):
- # MANAGE search and display suggestion_cohorte details
- if self.request.POST.get("search"):
- suggestion_cohorte = form.cleaned_data["suggestion_cohorte"]
- context = {"form": form}
- context["suggestion_cohorte_instance"] = suggestion_cohorte
- suggestion_unitaires = (
- suggestion_cohorte.suggestion_unitaires.all().order_by("?")[:100]
- )
- context["suggestion_unitaires"] = suggestion_unitaires
- return render(self.request, self.template_name, context)
-
- # ELSE: update the status of the suggestion_cohorte and its
- # suggestion_cohortelines
- suggestion_cohorte = form.cleaned_data["suggestion_cohorte"]
- new_status = (
- SuggestionStatut.ATRAITER.value
- if self.request.POST.get("dag_valid") == "1"
- else SuggestionStatut.REJETER.value
- )
-
- # FIXME: I am not sure we need the filter here
- suggestion_cohorte.suggestion_unitaires.filter(
- statut=SuggestionStatut.AVALIDER.value
- ).update(statut=new_status)
- suggestion_cohorte.statut = new_status
- suggestion_cohorte.save()
- messages.success(
- self.request,
- f"La cohorte {suggestion_cohorte} a été mise à jour avec le "
- f"statut {new_status}",
- )
-
- return super().form_valid(form)
-
- def form_invalid(self, form):
- messages.error(self.request, "Il y a des erreurs dans le formulaire.")
- return super().form_invalid(form)
-
-
-class DagsValidationDeprecated(IsStaffMixin, FormView):
- form_class = DagsForm
- template_name = "qfdmo/dags_validations.html"
- success_url = "/dags/validations"
-
- def get_initial(self):
- initial = super().get_initial()
- initial["suggestion_cohorte"] = self.request.GET.get("suggestion_cohorte")
- return initial
-
- def post(self, request, *args, **kwargs):
-
- dag_valid = request.POST.get("dag_valid")
- if dag_valid in ["1", "0"]:
- return self.form_valid(self.get_form())
- else:
- suggestion_cohorte_obj = SuggestionCohorte.objects.get(
- pk=request.POST.get("suggestion_cohorte")
- )
- id = request.POST.get("id")
- suggestion_unitaire = suggestion_cohorte_obj.suggestion_unitaires.filter(
- id=id
- ).first()
- identifiant_unique = request.POST.get("identifiant_unique")
- index = request.POST.get("index")
- action = request.POST.get("action")
-
- if action == "validate":
- suggestion_unitaire.update_row_update_candidate(
- SuggestionStatut.ATRAITER.value, index
- )
- elif action == "reject":
- suggestion_unitaire.update_row_update_candidate(
- SuggestionStatut.REJETER.value, index
- )
-
- updated_candidat = suggestion_unitaire.get_candidat(index)
-
- return render(
- request,
- "qfdmo/partials/candidat_row.html",
- {
- "identifiant_unique": identifiant_unique,
- "candidat": updated_candidat,
- "index": index,
- "request": request,
- "suggestion_cohorte": request.POST.get("suggestion_cohorte"),
- "suggestion_unitaire": suggestion_unitaire,
- },
- )
-
- def get_context_data(self, **kwargs):
- context = super().get_context_data(**kwargs)
- if self.request.GET.get("suggestion_cohorte"):
- suggestion_cohorte = SuggestionCohorte.objects.get(
- pk=self.request.GET.get("suggestion_cohorte")
- )
- context["suggestion_cohorte_instance"] = suggestion_cohorte
- suggestion_unitaires = (
- suggestion_cohorte.suggestion_unitaires.all().order_by("?")[:100]
- )
- context["suggestion_unitaires"] = suggestion_unitaires
-
- if (
- suggestion_unitaires
- and suggestion_unitaires[0].change_type == "UPDATE_ACTOR"
- ):
- # Pagination
- suggestion_unitaires = (
- suggestion_cohorte.suggestion_unitaires.all().order_by("id")
- )
- paginator = Paginator(suggestion_unitaires, 100)
- page_number = self.request.GET.get("page")
- page_obj = paginator.get_page(page_number)
- context["suggestion_unitaires"] = page_obj
-
- return context
-
- def form_valid(self, form):
- if not form.is_valid():
- raise ValueError("Form is not valid")
- suggestion_cohorte_id = form.cleaned_data["suggestion_cohorte"].id
- suggestion_cohorte_obj = SuggestionCohorte.objects.get(pk=suggestion_cohorte_id)
- new_status = (
- SuggestionStatut.ATRAITER.value
- if self.request.POST.get("dag_valid") == "1"
- else SuggestionStatut.REJETER.value
- )
-
- # FIXME: I am not sure we need the filter here
- suggestion_cohorte_obj.suggestion_unitaires.filter(
- status=SuggestionStatut.AVALIDER.value
- ).update(status=new_status)
-
- logging.info(f"{suggestion_cohorte_id} - {self.request.user}")
-
- suggestion_cohorte_obj.statut = new_status
- suggestion_cohorte_obj.save()
-
- return super().form_valid(form)
diff --git a/templates/data/_partials/create_actor_event.html b/templates/data/_partials/create_actor_event.html
deleted file mode 100644
index 6d9272f4c..000000000
--- a/templates/data/_partials/create_actor_event.html
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-
-
-
-
- change_type
- meta_data
- Acteur
- Proposition de service
- suggestion
-
{{ key }} : {{ value }}
- {% endfor %} -Action | -Sous-Catégories | -
---|---|
{{ service.action }} | -
-
|
-
{{ suggestion_unitaire.suggestion }}-
{{ key }} : {{ value }}
+ {% endfor %} +Action | +Sous-Catégories | +
---|---|
{{ service.action }} | +
+
|
+
{{ suggestion_unitaire.suggestion }}+
{{ meta_title }} : {{ meta_data }}
-{% endfor %} -{{ suggestion_cohorte_instance.metadata }}-
Source : {{ metadata.source }}
+
Statut de la cohorte : {{ suggestion_cohorte_instance.get_statut_display }}
+
Description : {{ metadata.description }}
+
Nb de suggestions : {{ metadata.nb_suggestions }}
{{ key }} : {{ value }}
+ {% endfor %} +{{ suggestion_unitaire.suggestion }}+