diff --git a/config/settings/test.py b/config/settings/test.py index 88768d54..32efb52d 100644 --- a/config/settings/test.py +++ b/config/settings/test.py @@ -35,8 +35,10 @@ # Nécessaire pour la C.I. : fixe des valeurs par défaut pour les conteneurs # faire correspondre les valeurs définies dans la configuration de la CI -CORS_ALLOWED_ORIGIN_REGEXES = [os.getenv("DJANGO_CORS_ALLOWED_ORIGIN_REGEXES","*")] +CORS_ALLOWED_ORIGIN_REGEXES = [os.getenv("DJANGO_CORS_ALLOWED_ORIGIN_REGEXES", "*")] FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:3000") IC_TOKEN_URL = os.getenv("IC_TOKEN_URL", "https://whatever-oidc-token-url.com") AWS_STORAGE_BUCKET_NAME = os.getenv("AWS_STORAGE_BUCKET_NAME", "dora") - +SIB_ONBOARDING_LIST = os.getenv("SIB_ONBOARDING_LIST", "1") +SIB_ONBOARDING_PUTATIVE_MEMBER_LIST = os.getenv("SIB_ONBOARDING_PUTATIVE_MEMBER_LIST", "2") +SIB_ONBOARDING_MEMBER_LIST = os.getenv("SIB_ONBOARDING_MEMBER_LIST", "3") diff --git a/config/urls.py b/config/urls.py index 0a6118de..f76b534f 100644 --- a/config/urls.py +++ b/config/urls.py @@ -13,7 +13,6 @@ import dora.structures.views import dora.support.views import dora.users.views -from dora.data_inclusion.client import di_client_factory from dora.oidc.urls import oidc_patterns from .url_converters import InseeCodeConverter, SiretConverter @@ -66,26 +65,20 @@ register_converter(SiretConverter, "siret") -# injection conditionnelle du client D·I : voir conftest.py -di_client = di_client_factory() - private_api_patterns = [ path("auth/", include("dora.rest_auth.urls")), path( "search/", dora.services.views.search, - {"di_client": di_client}, ), path("stats/event/", dora.stats.views.log_event), path( "services-di//", dora.services.views.service_di, - {"di_client": di_client}, ), path( "services-di//share/", dora.services.views.share_di_service, - {"di_client": di_client}, ), path("admin-division-search/", dora.admin_express.views.search), path("admin-division-reverse-search/", 
dora.admin_express.views.reverse_search), diff --git a/dora/conftest.py b/dora/conftest.py index 11368c6f..6d2dd838 100644 --- a/dora/conftest.py +++ b/dora/conftest.py @@ -9,7 +9,7 @@ def patch_di_client(): # Remplace le client D·I par défaut : # permet de s'affranchir de `settings.IS_TESTING` pour la plupart des cas. # Chaque test peut par la suite choisir son instance de client (fake). - with patch("dora.data_inclusion.client.di_client_factory") as mocked_di_client: + with patch("dora.data_inclusion.di_client_factory") as mocked_di_client: mocked_di_client.return_value = None yield diff --git a/dora/data_inclusion/client.py b/dora/data_inclusion/client.py index 9903124e..3b4d1509 100644 --- a/dora/data_inclusion/client.py +++ b/dora/data_inclusion/client.py @@ -7,6 +7,8 @@ import requests from django.conf import settings +from .constants import THEMATIQUES_MAPPING_DORA_TO_DI + logger = logging.getLogger(__name__) @@ -120,7 +122,12 @@ def search_services( url.args["code_insee"] = code_insee if thematiques is not None: - url.args["thematiques"] = thematiques + enriched_thematiques = [] + for thematique in thematiques: + enriched_thematiques += THEMATIQUES_MAPPING_DORA_TO_DI.get( + thematique, [thematique] + ) + url.args["thematiques"] = enriched_thematiques if types is not None: url.args["types"] = types diff --git a/dora/data_inclusion/constants.py b/dora/data_inclusion/constants.py new file mode 100644 index 00000000..79350df2 --- /dev/null +++ b/dora/data_inclusion/constants.py @@ -0,0 +1,15 @@ +from collections import defaultdict + +# À une thématique DI correspond une thématique Dora +THEMATIQUES_MAPPING_DI_TO_DORA = { + "logement-hebergement--etre-accompagne-dans-son-projet-accession": "logement-hebergement--etre-accompagne-pour-se-loger", + "logement-hebergement--etre-accompagne-en-cas-de-difficultes-financieres": "logement-hebergement--gerer-son-budget", + "logement-hebergement--financer-son-projet-travaux": "logement-hebergement--autre", +} + +# Inversion 
du dictionnaire +# À une thématique Dora correspond une liste de thématiques DI +THEMATIQUES_MAPPING_DORA_TO_DI = defaultdict(list) +for key, value in THEMATIQUES_MAPPING_DI_TO_DORA.items(): + if not value.endswith("--autre"): + THEMATIQUES_MAPPING_DORA_TO_DI[value].append(key) diff --git a/dora/data_inclusion/mappings.py b/dora/data_inclusion/mappings.py index 7c1018a9..1d6e943a 100644 --- a/dora/data_inclusion/mappings.py +++ b/dora/data_inclusion/mappings.py @@ -16,6 +16,8 @@ get_update_status, ) +from .constants import THEMATIQUES_MAPPING_DI_TO_DORA + DI_TO_DORA_DIFFUSION_ZONE_TYPE_MAPPING = { "commune": "city", "epci": "epci", @@ -114,12 +116,12 @@ def map_service(service_data: dict, is_authenticated: bool) -> dict: categories = None subcategories = None if service_data["thematiques"] is not None: - categories = ServiceCategory.objects.filter( - value__in=service_data["thematiques"] - ) - subcategories = ServiceSubCategory.objects.filter( - value__in=service_data["thematiques"] - ) + thematiques = [ + THEMATIQUES_MAPPING_DI_TO_DORA.get(thematique, thematique) + for thematique in service_data["thematiques"] + ] + categories = ServiceCategory.objects.filter(value__in=thematiques) + subcategories = ServiceSubCategory.objects.filter(value__in=thematiques) location_kinds = None if service_data["modes_accueil"] is not None: diff --git a/dora/data_inclusion/test_utils.py b/dora/data_inclusion/test_utils.py index 52842631..2a80694b 100644 --- a/dora/data_inclusion/test_utils.py +++ b/dora/data_inclusion/test_utils.py @@ -1,6 +1,8 @@ from typing import Optional from uuid import uuid4 +from .constants import THEMATIQUES_MAPPING_DORA_TO_DI + def make_di_service_data(**kwargs) -> dict: return { @@ -99,13 +101,18 @@ def search_services( services = [r for r in services if r["source"] in sources] if thematiques is not None: + enriched_thematiques = [] + for thematique in thematiques: + enriched_thematiques += THEMATIQUES_MAPPING_DORA_TO_DI.get( + thematique, [thematique] + ) 
services = [ r for r in services if any( t.startswith(requested_thematique) for t in r["thematiques"] - for requested_thematique in thematiques + for requested_thematique in enriched_thematiques ) ] diff --git a/dora/data_inclusion/tests.py b/dora/data_inclusion/tests.py index 6bc291ab..1c8f7820 100644 --- a/dora/data_inclusion/tests.py +++ b/dora/data_inclusion/tests.py @@ -1,43 +1,36 @@ -import unittest +from .constants import THEMATIQUES_MAPPING_DI_TO_DORA, THEMATIQUES_MAPPING_DORA_TO_DI +from .mappings import map_service +from .test_utils import FakeDataInclusionClient, make_di_service_data -from django.conf import settings -from rest_framework.test import APITestCase -from dora import data_inclusion +def test_map_service_thematiques_mapping(): + input_thematiques = [ + "logement-hebergement", + "logement-hebergement--connaissance-de-ses-droits-et-interlocuteurs", + "logement-hebergement--besoin-dadapter-mon-logement", + ] + list(THEMATIQUES_MAPPING_DI_TO_DORA.keys()) + expected_categories = ["logement-hebergement"] + expected_subcategories = [ + "logement-hebergement--connaissance-de-ses-droits-et-interlocuteurs", + "logement-hebergement--besoin-dadapter-mon-logement", + ] + list(THEMATIQUES_MAPPING_DI_TO_DORA.values()) -class DataInclusionIntegrationTestCase(APITestCase): - """These integration-level tests check the connection to data.inclusion. + di_service_data = make_di_service_data(thematiques=input_thematiques) + service = map_service(di_service_data, False) - They depend on the data.inclusion api and should not be run - systematically, because of their inherent high cost and instability. 
- """ + assert sorted(service["categories"]) == sorted(expected_categories) + assert sorted(service["subcategories"]) == sorted(expected_subcategories) - def setUp(self): - self.di_client = data_inclusion.di_client_factory() - @unittest.skipIf( - settings.SKIP_DI_INTEGRATION_TESTS, "data.inclusion api not available" - ) - def test_search_services(self): - self.di_client.search_services( - code_insee="91223", - thematiques=["mobilite--comprendre-et-utiliser-les-transports-en-commun"], - ) +def test_di_client_search_thematiques_mapping(): + input_thematique = list(THEMATIQUES_MAPPING_DORA_TO_DI.keys())[0] + output_thematique = list(THEMATIQUES_MAPPING_DORA_TO_DI.values())[0][0] - @unittest.skipIf( - settings.SKIP_DI_INTEGRATION_TESTS, "data.inclusion api not available" - ) - def test_list_services(self): - self.di_client.list_services(source="dora") + di_client = FakeDataInclusionClient() + di_service_data = make_di_service_data(thematiques=[output_thematique]) + di_client.services.append(di_service_data) - @unittest.skipIf( - settings.SKIP_DI_INTEGRATION_TESTS, "data.inclusion api not available" - ) - def test_retrieve_service(self): - services = self.di_client.list_services(source="dora") + results = di_client.search_services(thematiques=[input_thematique]) - self.di_client.retrieve_service( - source="dora", - id=services[0]["id"], - ) + assert len(results) == 1 diff --git a/dora/onboarding/__init__.py b/dora/onboarding/__init__.py index e284a7c5..85ca84f0 100644 --- a/dora/onboarding/__init__.py +++ b/dora/onboarding/__init__.py @@ -135,6 +135,29 @@ def _add_user_to_sib_list( return True +def _remove_from_sib_list( + client: sib_api.ContactsApi, user: User, sib_list_id: int +) -> bool: + # retire un utilisateur donné d'une liste SiB + try: + client.remove_contact_from_list( + sib_list_id, sib_api.RemoveContactFromList(emails=[user.email]) + ) + logger.info( + "L'utilisateur #%s a été retiré de la liste SiB: %s", user.pk, sib_list_id + ) + except SibApiException as 
exc: + logger.exception(exc) + logger.error( + "Impossible de retirer l'utilisateur #%s de la liste SiB: %s", + user.pk, + sib_list_id, + ) + return False + + return True + + def _create_sib_contact( client: sib_api.ContactsApi, user: User, attributes: dict, sib_list_id: int ) -> bool: @@ -252,3 +275,9 @@ def onboard_user(user: User, structure: Structure): # création ou maj du contact SiB _create_or_update_sib_contact(client, user, attributes, sib_list_id) + + # dans le cas d'un utilisateur passé membre, le retirer de la liste des invités + if sib_list_id == int(settings.SIB_ONBOARDING_MEMBER_LIST): + _remove_from_sib_list( + client, user, int(settings.SIB_ONBOARDING_PUTATIVE_MEMBER_LIST) + ) diff --git a/dora/onboarding/test.py b/dora/onboarding/test.py new file mode 100644 index 00000000..088ef27d --- /dev/null +++ b/dora/onboarding/test.py @@ -0,0 +1,123 @@ +from unittest.mock import Mock, patch + +import pytest +from django.conf import settings +from django.urls import reverse + +from dora.core.test_utils import make_structure, make_user +from dora.users.enums import MainActivity + + +@pytest.mark.parametrize( + "main_activity,expected_sib_list", + [ + (MainActivity.OFFREUR, settings.SIB_ONBOARDING_LIST), + (MainActivity.ACCOMPAGNATEUR, settings.SIB_ONBOARDING_PUTATIVE_MEMBER_LIST), + ( + MainActivity.ACCOMPAGNATEUR_OFFREUR, + settings.SIB_ONBOARDING_PUTATIVE_MEMBER_LIST, + ), + ], +) +@patch("dora.onboarding._create_or_update_sib_contact") +@patch("dora.onboarding._setup_sib_client", Mock(return_value=True)) +def test_onboard_other_activities( + mock_create_contact, main_activity, expected_sib_list, api_client +): + # Les utilisateurs ayant offreurs ou autre pour activité principale + # sont redirigés vers l'ancienne liste Brevo (onboarding "traditionnel"). + + # Les utilisateurs accompagnateurs ou accompagnateurs/offreurs + # sont "onboardés" sur la bonne liste Brevo des invités lors de leur première invitation. 
+ + # note : le deuxième patch n'est pas pris en compte comme un paramètre du test + # (à cause de l'association directe/explicite à un nouveau mock) + + structure = make_structure() + # La création d'un admin de la structure est nécessaire pour que l'utilisateur + # soit rattaché en tant qu'invité (sinon il en devient le premier membre et admin). + make_user(structure=structure, is_admin=True) + invited_user = make_user(main_activity=main_activity) + + api_client.force_authenticate(user=invited_user) + api_client.post( + reverse("join-structure"), + data={ + # Utiliser le slug, car le SIRET sera invalide (random). + "structure_slug": structure.slug, + "cgu_version": "1", + }, + ) + + assert ( + invited_user in structure.putative_members.all() + ), "L'utilisateur n'est pas un invité de la structure" + assert mock_create_contact.called, "Le contact Brevo n'a pas été créé" + + _, user, attrs, sib_list = mock_create_contact.call_args.args + + assert user == invited_user, "L'utilisateur ne correspond pas" + assert attrs, "Les attributs Brevo ne sont pas définis" + assert ( + str(sib_list) == expected_sib_list + ), "L'utilisateur n'est pas rattaché à la bonne liste Brevo" + + +@pytest.mark.parametrize( + "main_activity,expected_sib_list", + [ + (MainActivity.ACCOMPAGNATEUR, settings.SIB_ONBOARDING_MEMBER_LIST), + (MainActivity.ACCOMPAGNATEUR_OFFREUR, settings.SIB_ONBOARDING_MEMBER_LIST), + ], +) +@patch("dora.onboarding._remove_from_sib_list") +@patch("dora.onboarding._create_or_update_sib_contact") +@patch("dora.onboarding._setup_sib_client", Mock(return_value=True)) +def test_onboard_new_member( + mock_create_contact, + mock_remove_from_list, + main_activity, + expected_sib_list, + api_client, +): + # Les utilisateurs accompagnateurs ou accompagnateurs/offreurs + # sont "onboardés" sur la liste Brevo des membres lors de leur premier rattachement à une structure. 
+ + member = make_user(main_activity=main_activity) + structure = make_structure(putative_member=member) + admin = make_user(structure=structure, is_admin=True) + + # Cette action doit-être effectuée par un admin de la structure. + api_client.force_authenticate(user=admin) + + # L'utilisateur accepte l'invitation. + r = api_client.post( + reverse( + "structure-putative-member-accept-membership-request", + kwargs={"pk": structure.putative_membership.first().pk}, + ), + ) + + # Etant différent du traditionnel 200, on teste le statut de retour. + assert 201 == r.status_code, "Code de status invalide (201 attendu)" + + assert ( + member in structure.members.all() + ), "L'utilisateur n'est pas membre de la structure" + assert mock_create_contact.called, "Le contact Brevo n'a pas été créé" + + _, user, attrs, sib_list = mock_create_contact.call_args.args + + assert user == member, "L'utilisateur ne correspond pas" + assert attrs, "Les attributs Brevo ne sont pas définis" + assert ( + str(sib_list) == expected_sib_list + ), "L'utilisateur n'est pas rattaché à la bonne liste Brevo" + + # On retire un utilisateur de la liste Brevo "invité" après qu'il soit devenu membre. + assert ( + mock_remove_from_list.called + ), "Pas de retrait de l'utilisateur de la liste Brevo des invités" + mock_remove_from_list.assert_called_with( + True, user, int(settings.SIB_ONBOARDING_PUTATIVE_MEMBER_LIST) + ) diff --git a/dora/rest_auth/urls.py b/dora/rest_auth/urls.py index 9cd041f3..92770361 100644 --- a/dora/rest_auth/urls.py +++ b/dora/rest_auth/urls.py @@ -3,8 +3,8 @@ from . 
import views urlpatterns = [ - path("user-info/", views.user_info), - path("join-structure/", views.join_structure), - path("invite-first-admin/", views.invite_first_admin), - path("accept-cgu/", views.accept_cgu), + path("user-info/", views.user_info, name="user-info"), + path("join-structure/", views.join_structure, name="join-structure"), + path("invite-first-admin/", views.invite_first_admin, name="invite-first-admin"), + path("accept-cgu/", views.accept_cgu, name="accept-cgu"), ] diff --git a/dora/rest_auth/views.py b/dora/rest_auth/views.py index 83b36866..b6c68c25 100644 --- a/dora/rest_auth/views.py +++ b/dora/rest_auth/views.py @@ -178,7 +178,7 @@ def join_structure(request): ) # on teste au préalable si l'utilisateur était membre de la structure - # pour effectuer l'onboarding *après* le attachement + # pour effectuer l'onboarding *après* le rattachement was_already_member_of_a_structure = ( StructureMember.objects.filter(user=user).exists() or StructurePutativeMember.objects.filter( diff --git a/dora/services/constants.py b/dora/services/constants.py new file mode 100644 index 00000000..22aeb92d --- /dev/null +++ b/dora/services/constants.py @@ -0,0 +1,4 @@ +EXCLUDED_DI_SERVICES_THEMATIQUES = ( + # Thématiques n'étant pas à destination des bénéficiaires + "logement-hebergement--aides-financieres-investissement-locatif", +) diff --git a/dora/services/search.py b/dora/services/search.py index 04a9fd04..779ef259 100644 --- a/dora/services/search.py +++ b/dora/services/search.py @@ -17,6 +17,7 @@ from dora.core.constants import WGS84 from dora.structures.models import Structure +from .constants import EXCLUDED_DI_SERVICES_THEMATIQUES from .serializers import SearchResultSerializer from .utils import filter_services_by_city_code @@ -169,6 +170,16 @@ def _get_di_results( ) ] + # Exclus les services ayant des thématiques à exclure + raw_di_results = [ + result + for result in raw_di_results + if not any( + thematique in result["service"]["thematiques"] + for 
thematique in EXCLUDED_DI_SERVICES_THEMATIQUES + ) + ] + supported_service_kinds = models.ServiceKind.objects.values_list("value", flat=True) mapped_di_results = [ diff --git a/dora/services/serializers.py b/dora/services/serializers.py index fe7daf3b..ee3d2ac7 100644 --- a/dora/services/serializers.py +++ b/dora/services/serializers.py @@ -750,7 +750,7 @@ def get_service(self, obj): # le client D·I doit être importé avec le *même* chemin que # celui utilisé au moment du `patch` # (sinon, le mock échoue) - di_client = dora.data_inclusion.client.di_client_factory() + di_client = dora.data_inclusion.di_client_factory() try: di_service = ( diff --git a/dora/services/tests/test_search.py b/dora/services/tests/test_search.py index ef6ec6ed..fb9c3ab7 100644 --- a/dora/services/tests/test_search.py +++ b/dora/services/tests/test_search.py @@ -1,8 +1,15 @@ +from unittest import mock + import pytest from model_bakery import baker from dora.admin_express.models import AdminDivisionType -from dora.core.test_utils import make_service, make_structure, make_user +from dora.core.test_utils import ( + make_service, + make_structure, + make_user, +) +from dora.data_inclusion.test_utils import FakeDataInclusionClient, make_di_service_data from dora.services.enums import ServiceStatus @@ -52,3 +59,39 @@ def test_search_services_with_orphan_structure( [found] = response.data["services"] assert found["slug"] == orphan_service.slug + + +def test_search_services_excludes_some_action_logement_results(api_client): + # Le service ayant la thématique logement-hebergement--aides-financieres-investissement-locatif + # ne doit pas être retourné + + # le paramètre `city` est nécessaire a minima + city = baker.make("City") + + with mock.patch("dora.data_inclusion.di_client_factory") as mock_di_client_factory: + di_client = FakeDataInclusionClient() + service1 = make_di_service_data( + thematiques=[ + "logement-hebergement", + "logement-hebergement--aides-financieres-investissement-locatif", + 
"logement-hebergement--besoin-dadapter-mon-logement", + ] + ) + service2 = make_di_service_data( + thematiques=[ + "logement-hebergement", + "logement-hebergement--besoin-dadapter-mon-logement", + ] + ) + di_client.services.append(service1) + di_client.services.append(service2) + + mock_di_client_factory.return_value = di_client + + response = api_client.get(f"/search/?city={city.code}") + + assert response.status_code == 200 + + assert ( + len(response.data["services"]) == 1 + ), "un seul service devrait être retourné" diff --git a/dora/services/tests/test_services.py b/dora/services/tests/test_services.py index a5202de5..c2a0d576 100644 --- a/dora/services/tests/test_services.py +++ b/dora/services/tests/test_services.py @@ -1,4 +1,5 @@ from datetime import timedelta +from unittest import mock import requests from django.contrib.gis.geos import MultiPolygon, Point @@ -1034,7 +1035,20 @@ def setUp(self): self.di_client = FakeDataInclusionClient() self.factory = APIRequestFactory() - self.search = lambda request: search(request, di_client=self.di_client) + + def search(self, request, di_client=None): + with mock.patch( + "dora.data_inclusion.di_client_factory" + ) as mock_di_client_factory: + mock_di_client_factory.return_value = di_client or self.di_client + return search(request) + + def service_di(self, request, di_id): + with mock.patch( + "dora.data_inclusion.di_client_factory" + ) as mock_di_client_factory: + mock_di_client_factory.return_value = self.di_client + return service_di(request, di_id=di_id) def make_di_service(self, **kwargs) -> dict: service_data = make_di_service_data(**kwargs) @@ -1185,7 +1199,7 @@ def search_services(self, **kwargs): di_client = FaultyDataInclusionClient() request = self.factory.get("/search/", {"city": self.city1.code}) - response = search(request, di_client=di_client) + response = self.search(request, di_client) assert response.status_code == 200 # ajout des "city bounds" pour la carte assert len(response.data) == 2 @@ 
-1226,7 +1240,7 @@ def test_service_di_contains_service_fields(self): service_data = self.make_di_service() di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) for field in set(ServiceSerializer.Meta.fields): with self.subTest(field=field): @@ -1244,7 +1258,7 @@ def test_service_di_address(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["address1"], service_data["adresse"]) self.assertEqual(response.data["address2"], service_data["complement_adresse"]) @@ -1277,7 +1291,7 @@ def test_service_di_categories(self): service_data = self.make_di_service(thematiques=thematiques) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["categories"], categories) self.assertEqual( @@ -1315,7 +1329,7 @@ def test_service_di_beneficiaries_access_modes(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual( response.data["beneficiaries_access_modes"], @@ -1332,7 +1346,7 @@ def test_service_di_beneficiaries_access_modes_other(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = 
self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual( response.data["beneficiaries_access_modes_other"], "Nous consulter" @@ -1365,7 +1379,7 @@ def test_service_di_coach_orientation_modes(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual( response.data["coach_orientation_modes"], coach_orientation_modes @@ -1381,7 +1395,7 @@ def test_service_di_coach_orientation_modes_other(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual( response.data["coach_orientation_modes_other"], "Nous consulter" @@ -1399,7 +1413,7 @@ def test_service_di_concerned_public(self): service_data = self.make_di_service(profils=profils) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["concerned_public"], concerned_public) self.assertEqual( @@ -1426,7 +1440,7 @@ def test_service_di_contact(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["contact_email"], courriel) self.assertEqual(response.data["contact_name"], contact_nom_prenom) @@ -1446,7 +1460,7 @@ def test_service_di_credentials(self): service_data = 
self.make_di_service(justificatifs=justificatifs) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["credentials"], credentials) self.assertEqual( @@ -1499,7 +1513,7 @@ def test_service_di_diffusion_zone(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual( response.data["diffusion_zone_details"], diffusion_zone_details @@ -1528,7 +1542,7 @@ def test_service_di_fee(self): service_data = self.make_di_service(frais=frais) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["fee_condition"], fee_condition) @@ -1542,7 +1556,7 @@ def test_service_di_fee(self): service_data = self.make_di_service(frais_autres=frais_autres) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["fee_details"], fee_details) @@ -1557,7 +1571,7 @@ def test_service_di_location_kinds(self): service_data = self.make_di_service(modes_accueil=modes_accueil) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) 
self.assertEqual(response.status_code, 200) self.assertEqual(response.data["location_kinds"], location_kinds) self.assertEqual( @@ -1575,7 +1589,7 @@ def test_service_di_requirements(self): service_data = self.make_di_service(pre_requis=pre_requis) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["requirements"], requirements) self.assertEqual( @@ -1593,7 +1607,7 @@ def test_service_di_kinds(self): service_data = self.make_di_service(types=types) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["kinds"], kinds) self.assertEqual(response.data["kinds_display"], kinds_display) @@ -1614,7 +1628,7 @@ def test_service_di_desc(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["name"], service_data["nom"]) self.assertEqual(response.data["full_desc"], desc) @@ -1634,7 +1648,7 @@ def test_service_di_date(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["creation_date"], service_data["date_creation"]) self.assertEqual(response.data["modification_date"], service_data["date_maj"]) @@ -1656,7 +1670,7 @@ def 
test_service_di_structure(self): ) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["structure"], service_data["structure_id"]) self.assertEqual( @@ -1669,7 +1683,7 @@ def test_service_di_is_cumulative(self): service_data = self.make_di_service(cumulable=cumulable) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["is_cumulative"], cumulable) @@ -1683,7 +1697,7 @@ def test_service_di_update_status(self): service_data = self.make_di_service(date_maj=date_maj) di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["update_status"], update_status) @@ -1691,7 +1705,7 @@ def test_service_di_misc(self): service_data = self.make_di_service() di_id = self.get_di_id(service_data) request = self.factory.get(f"/services-di/{di_id}/") - response = service_di(request, di_id=di_id, di_client=self.di_client) + response = self.service_di(request, di_id=di_id) self.assertEqual(response.status_code, 200) self.assertEqual(response.data["can_write"], False) self.assertEqual(response.data["has_already_been_unpublished"], None) diff --git a/dora/services/views.py b/dora/services/views.py index f5ecfb7a..c1057bab 100644 --- a/dora/services/views.py +++ b/dora/services/views.py @@ -715,7 +715,7 @@ def filter_custom_choices(choices): @api_view() @permission_classes([permissions.AllowAny]) -def 
service_di(request, di_id: str, di_client=None): +def service_di(request, di_id: str): """Retrieve a single service from data.inclusion. The ``di_client`` acts as an entrypoint to the data.inclusion service repository. @@ -725,6 +725,8 @@ def service_di(request, di_id: str): source_di, di_service_id = di_id.split("--") + di_client = data_inclusion.di_client_factory() + try: raw_service = di_client.retrieve_service(source=source_di, id=di_service_id) except requests.ConnectionError: @@ -743,10 +745,11 @@ def share_di_service( request, di_id: str, - di_client=None, ): source_di, di_service_id = di_id.split("--") + di_client = data_inclusion.di_client_factory() + try: raw_service = di_client.retrieve_service(source=source_di, id=di_service_id) except requests.ConnectionError: @@ -763,7 +766,7 @@ @api_view() @permission_classes([permissions.AllowAny]) -def search(request, di_client=None): +def search(request): city_code = request.GET.get("city") categories = request.GET.get("cats") subcategories = request.GET.get("subs") @@ -785,6 +788,8 @@ city_code = arrdt_to_main_insee_code(city_code) city = get_object_or_404(City, pk=city_code) + di_client = data_inclusion.di_client_factory() + sorted_services = search_services( request=request, di_client=di_client, diff --git a/dora/structures/management/commands/import_dr_dt_ft.py b/dora/structures/management/commands/import_dr_dt_ft.py index b407aac9..fcf60040 100644 --- a/dora/structures/management/commands/import_dr_dt_ft.py +++ b/dora/structures/management/commands/import_dr_dt_ft.py @@ -19,19 +19,28 @@ class Command(BaseCommand): help = "Importe les DG/DR/DT France Travail, ainsi que leur code SAFIR" + def add_arguments(self, parser): + parser.add_argument( + "--file", help="Fichier à importer (si différent de `drdt.csv`)" + ) + def finalize_structure(self, structure, safir): structure.code_safir_pe 
= safir structure.source = SOURCE structure.creator = BOT_USER structure.last_editor = BOT_USER structure.typology = Typologie.FT.value - structure.save() - structure.national_labels.add(LABEL) - send_moderation_notification( - structure, - BOT_USER, - "Structure créée à partir de l’import DR/DT France Travail", - ModerationStatus.VALIDATED, + try: + structure.save() + except Exception as ex: + print(f"Erreur à la création de la structure SIRET:{structure.siret} / SAFIR: {safir} ({ex})") + else: + structure.national_labels.add(LABEL) + send_moderation_notification( + structure, + BOT_USER, + "Structure créée à partir de l’import DR/DT France Travail", + ModerationStatus.VALIDATED, ) def create_structure(self, siret, name, safir): @@ -57,10 +66,15 @@ def create_branch(self, parent, name, safir): self.finalize_structure(structure, safir) def handle(self, *args, **options): + file = options["file"] + with transaction.atomic(durable=True): - mapping_file_path = ( - Path(__file__).parent.parent.parent / "data" / "drdt.csv" - ) + if file: + mapping_file_path = Path(file) + else: + mapping_file_path = ( + Path(__file__).parent.parent.parent / "data" / "drdt.csv" + ) with open(mapping_file_path) as mapping_file: reader = csv.DictReader(mapping_file) @@ -88,7 +102,7 @@ def handle(self, *args, **options): .exists() ): print( - f"{name} : ce safir existe déjà, mais il est associé à un autre siret" + f"{name} : ce code SAFIR ({safir}) existe déjà, mais il est associé à un autre SIRET" ) continue try: diff --git a/dora/structures/migrations/0072_alter_structure_name.py b/dora/structures/migrations/0072_alter_structure_name.py new file mode 100644 index 00000000..3fbfd4a6 --- /dev/null +++ b/dora/structures/migrations/0072_alter_structure_name.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-10-03 13:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("structures", "0071_structure_putative_members"), + ] + + 
operations = [ + migrations.AlterField( + model_name="structure", + name="name", + field=models.CharField(db_index=True, max_length=255, verbose_name="Nom"), + ), + ] diff --git a/dora/structures/models.py b/dora/structures/models.py index eedf4ae6..289350c2 100644 --- a/dora/structures/models.py +++ b/dora/structures/models.py @@ -205,7 +205,7 @@ class Structure(ModerationMixin, models.Model): db_index=True, ) - name = models.CharField(verbose_name="Nom", max_length=255) + name = models.CharField(verbose_name="Nom", max_length=255, db_index=True) typology = models.CharField( choices=zip([t.value for t in Typologie], [t.label for t in Typologie]), diff --git a/dora/structures/views.py b/dora/structures/views.py index 0a15a86a..781ea155 100644 --- a/dora/structures/views.py +++ b/dora/structures/views.py @@ -7,6 +7,7 @@ from rest_framework.decorators import action, api_view, permission_classes from rest_framework.response import Response +from dora import onboarding from dora.core.models import ModerationStatus from dora.core.notify import send_moderation_notification from dora.core.pagination import OptionalPageNumberPagination @@ -52,22 +53,27 @@ def get_queryset(self): only_managed = self.request.query_params.get("managed") only_pending = self.request.query_params.get("pending") only_active = self.request.query_params.get("active") + search_string = self.request.query_params.get("search", None) + + structures = Structure.objects.select_related("source", "parent").all() + + if search_string: + structures = structures.filter(name__icontains=search_string) - all_structures = Structure.objects.select_related("source", "parent").all() if only_managed: if not user or not user.is_authenticated: return Structure.objects.none() if user.is_staff: - return all_structures.order_by("-modification_date").distinct() + return structures.order_by("-modification_date").distinct() elif user.is_manager and user.departments: return ( - 
all_structures.filter(department__in=user.departments) + structures.filter(department__in=user.departments) .order_by("-modification_date") .distinct() ) else: return ( - all_structures.filter(membership__user=user) + structures.filter(membership__user=user) .order_by("-modification_date") .distinct() ) @@ -75,20 +81,20 @@ def get_queryset(self): if not user or not user.is_authenticated: return Structure.objects.none() return ( - all_structures.filter(putative_membership__user=user) + structures.filter(putative_membership__user=user) .exclude(putative_membership__invited_by_admin=True) .order_by("-modification_date") .distinct() ) elif only_active: qs = ( - all_structures.filter(services__status=ServiceStatus.PUBLISHED) + structures.filter(services__status=ServiceStatus.PUBLISHED) .order_by("-modification_date") .distinct() ) return qs else: - return all_structures.order_by("-modification_date") + return structures.order_by("-modification_date") def get_serializer_class(self): if self.action == "list": @@ -299,6 +305,8 @@ def accept_membership_request(self, request, pk): if not structure.can_edit_members(request_user): raise exceptions.PermissionDenied + new_member = pm.user + with transaction.atomic(durable=True): membership = StructureMember.objects.create( user=pm.user, @@ -308,6 +316,13 @@ def accept_membership_request(self, request, pk): pm.delete() membership.notify_access_granted() + # À ce point, il peut être nécessaire de déclencher un onboarding : + # la *première* acceptation en tant que membre d'une structure déclenche + # l'inscription à une liste Brevo. 
+ if new_member.membership.count() == 1: + # on vient d'enregistrer le nouveau membre (1 seule structure) + onboarding.onboard_user(new_member, structure) + return Response(status=201) @action( diff --git a/envs-example/dev.env b/envs-example/dev.env index ad4e8d24..bb265e41 100644 --- a/envs-example/dev.env +++ b/envs-example/dev.env @@ -40,6 +40,6 @@ DATA_INCLUSION_URL=https://data-inclusion-api-staging.osc-secnum-fr1.scalingo.io # TODO: exclure dora avant merge # TODO: put this in (admin)model ? or take it as input from front ? # TODO: "mediation_numerique_???" -DATA_INCLUSION_STREAM_SOURCES=dora,soliguide,odspep,monenfant +DATA_INCLUSION_STREAM_SOURCES=action-logement,agefiph,data-inclusion,france-travail,fredo,mediation-numerique,mes-aides,monenfant,odspep,pole-emploi,reseau-alpha,soliguide INCLUDES_DI_SERVICES_IN_SAVED_SEARCH_NOTIFICATIONS=False diff --git a/queries/metabase/01-base/0010_mb_structure.sql b/queries/metabase/01-base/0010_mb_structure.sql index 719bba64..49cd21aa 100644 --- a/queries/metabase/01-base/0010_mb_structure.sql +++ b/queries/metabase/01-base/0010_mb_structure.sql @@ -7,3 +7,6 @@ select -- noqa: AM04 select concat('https://dora.inclusion.beta.gouv.fr/structures/', slug) ) as dora_url from structures_structure; + +-- Keys & constraints +alter table mb_structure add primary key (id); diff --git a/queries/metabase/01-base/0080_mb_serviceview_all.sql b/queries/metabase/01-base/0080_mb_serviceview_all.sql index e7516dc5..259cb02b 100644 --- a/queries/metabase/01-base/0080_mb_serviceview_all.sql +++ b/queries/metabase/01-base/0080_mb_serviceview_all.sql @@ -52,3 +52,13 @@ left join services_service as s on sv.service_id = s.id left join structures_structure as st on sv.structure_id = st.id; alter table mb_serviceview_all add primary key (id); + +-- Indexes +create index idx_mb_serviceview_all_structure_id +on mb_serviceview_all ("structure_id"); + +create index idx_mb_serviceview_all_user_id +on mb_serviceview_all ("user_id"); + +create 
index idx_mb_serviceview_all_is_staff_is_manager +on mb_serviceview_all ("is_staff", "is_manager"); diff --git a/queries/metabase/01-base/0110_mb_putative_members.sql b/queries/metabase/01-base/0110_mb_putative_members.sql index 149af8c5..1679bfef 100644 --- a/queries/metabase/01-base/0110_mb_putative_members.sql +++ b/queries/metabase/01-base/0110_mb_putative_members.sql @@ -61,15 +61,8 @@ where order by mu.date_joined desc; -- Indexes - -create index mb_putative_members_date_joined_idx on public.mb_putative_members ( - "Date de création" -); -create index mb_putative_members_dpt_idx on public.mb_putative_members ( - "Département" -); -create index mb_putative_members_dpt_is_valid on public.mb_putative_members ( - "E-mail validé" -); +create index mb_putative_members_date_joined_idx on mb_putative_members ("Date de création"); +create index mb_putative_members_dpt_idx on mb_putative_members ("Département"); +create index mb_putative_members_dpt_is_valid on mb_putative_members ("E-mail validé"); comment on table mb_putative_members is 'Liste des membres en attente de rattachement'; diff --git a/queries/metabase/01-base/0120_mb_notification_logs.sql b/queries/metabase/01-base/0120_mb_notification_logs.sql index c3a1b0e5..0e6352b1 100644 --- a/queries/metabase/01-base/0120_mb_notification_logs.sql +++ b/queries/metabase/01-base/0120_mb_notification_logs.sql @@ -18,12 +18,8 @@ where -- log level 20 => INFO and level = 20; - -create index mb_notification_logs_date_creation_idx on public.mb_notification_logs ( - "date_creation" -); -create index mb_notification_logs_tache_idx on public.mb_notification_logs ( - "tache" -); +-- Indexes +create index mb_notification_logs_date_creation_idx on mb_notification_logs ("date_creation"); +create index mb_notification_logs_tache_idx on mb_notification_logs ("tache"); comment on table mb_notification_logs is 'Historique des tâches de traitement de notification'; diff --git a/queries/metabase/01-base/0130_mb_stats_searchview.sql 
b/queries/metabase/01-base/0130_mb_stats_searchview.sql new file mode 100644 index 00000000..62814caa --- /dev/null +++ b/queries/metabase/01-base/0130_mb_stats_searchview.sql @@ -0,0 +1,34 @@ +drop table if exists mb_stats_searchview cascade; + +create table mb_stats_searchview as +select + "search".id, + "search".path, + "search".date, + "search".anonymous_user_hash, + "search".is_logged, + "search".is_staff, + "search".is_manager, + "search".is_an_admin, + "search".user_kind, + "search".department, + "search".city_code, + "search".num_results, + "search".user_id, + "search".num_di_results, + "search".num_di_results_top10, + "search".results_slugs_top10 +from stats_searchview as "search"; + +-- Keys & constraints +alter table mb_stats_searchview add primary key (id); + +-- Indexes +create index idx_mb_stats_searchview_is_staff_is_manager_is_logged +on mb_stats_searchview using btree ( + "is_staff", "is_manager", "is_logged" +); +create index idx_mb_stats_searchview_user_id +on mb_stats_searchview using btree ( + "user_id" +); diff --git a/queries/metabase/01-base/0140_mb_stats_structureview.sql b/queries/metabase/01-base/0140_mb_stats_structureview.sql new file mode 100644 index 00000000..5d00eaa3 --- /dev/null +++ b/queries/metabase/01-base/0140_mb_stats_structureview.sql @@ -0,0 +1,37 @@ +drop table if exists mb_stats_structureview cascade; + +create table mb_stats_structureview as +select + "search".id, + "search".path, + "search".date, + "search".anonymous_user_hash, + "search".is_logged, + "search".is_staff, + "search".is_manager, + "search".is_an_admin, + "search".user_kind, + "search".is_structure_member, + "search".is_structure_admin, + "search".structure_department, + "search".structure_city_code, + "search".structure_id, + "search".user_id, + "search".structure_source +from stats_structureview as "search"; + +-- Keys & constraints +alter table mb_stats_structureview add primary key (id); + +-- Indexes +create index idx_mb_stats_structureview_date +on 
public.mb_stats_structureview +using btree ("date"); + +create index idx_mb_stats_structureview_filters +on public.mb_stats_structureview +using btree ("is_structure_member", "is_structure_admin", "is_staff"); + +create index idx_mb_stats_structureview_user_id +on public.mb_stats_structureview +using btree ("user_id"); diff --git a/queries/metabase/01-base/README.md b/queries/metabase/01-base/README.md index 3480411e..05659278 100644 --- a/queries/metabase/01-base/README.md +++ b/queries/metabase/01-base/README.md @@ -1 +1,5 @@ Ici toutes les définitions de tables et vues metabase anciennement maj via scripts shell. + +Règles à avoir en tête : +* tout fichier est considéré comme une table ou vue qui sera DROP par le script +* de fait, il n'est pas possible d'avoir de fichiers avec juste de l'ajout de CONSTRAINTS ou d'INDEX ; il faut penser à reconstruire la table \ No newline at end of file diff --git a/queries/metabase/03-questions/0040_q_orphan_users.sql b/queries/metabase/03-questions/0040_q_orphan_users.sql index da7660dc..c2dda488 100644 --- a/queries/metabase/03-questions/0040_q_orphan_users.sql +++ b/queries/metabase/03-questions/0040_q_orphan_users.sql @@ -36,11 +36,11 @@ order by mu.date_joined desc; alter table public.q_orphan_users add constraint q_orphan_users_pk primary key ( -- noqa: LT05 "ID utilisateur" ); -create index q_orphan_users_valide_idx on public.q_orphan_users ( - "E-mail validé" -); -create index q_orphan_users_date_joined_idx on public.q_orphan_users ( - "Date de création" -); + +create index q_orphan_users_valide_idx +on q_orphan_users ("E-mail validé"); + +create index q_orphan_users_date_joined_idx +on q_orphan_users ("Date de création"); comment on table q_orphan_users is 'Liste des utilisateurs non rattachés à une structure et sans invitation'; diff --git a/queries/metabase/03-questions/0050_q_users_before_ic.sql b/queries/metabase/03-questions/0050_q_users_before_ic.sql index 4a88eb4a..0d6ac030 100644 --- 
a/queries/metabase/03-questions/0050_q_users_before_ic.sql +++ b/queries/metabase/03-questions/0050_q_users_before_ic.sql @@ -35,8 +35,8 @@ order by mu.date_joined desc; alter table public.q_users_before_ic add constraint q_users_before_ic_pk primary key ( -- noqa: LT05 "ID utilisateur" ); -create index q_users_before_ic_date_joined_idx on public.q_users_before_ic ( - "Date de création" -); + +create index q_users_before_ic_date_joined_idx +on q_users_before_ic ("Date de création"); comment on table q_users_before_ic is 'Utilisateurs avec e-mail non validé, créés avant IC'; diff --git a/queries/metabase/03-questions/0060_q_searches_with_few_results.sql b/queries/metabase/03-questions/0060_q_searches_with_few_results.sql new file mode 100644 index 00000000..939cf250 --- /dev/null +++ b/queries/metabase/03-questions/0060_q_searches_with_few_results.sql @@ -0,0 +1,37 @@ +-- Question(s) concernée(s): +-- • "Nombre de recherches aboutissant à peu de résultats (<6)" +-- • "Nombre de recherches aboutissant à 0 résultat" + +drop table if exists q_searches_with_few_results; + +create table q_searches_with_few_results as ( + select + "search".id as "id", + "search".path as "path", + "search".date as "date", + "search".num_results as "num_results", + "search".department as "department", + ss.label as "label", + category.label as "category" + from stats_searchview as "search" + left join + stats_searchview_categories as "service" + on "search".id = "service".searchview_id + left join + services_servicecategory as category + on "service".servicecategory_id = category.id + left join + structures_structuremember as member + on "search".user_id = member.user_id + left join mb_structure as structure on member.structure_id = structure.id + left join + structures_structure_national_labels as ssnl + on structure.id = ssnl.structure_id + left join + structures_structurenationallabel as ss + on ssnl.structurenationallabel_id = ss.id + where + "search".num_results < 6 + and 
"search".is_staff = false + and "search".is_manager = false +); diff --git a/queries/metabase/03-questions/0070_q_searches_by_category_department_label.sql b/queries/metabase/03-questions/0070_q_searches_by_category_department_label.sql new file mode 100644 index 00000000..12074ccb --- /dev/null +++ b/queries/metabase/03-questions/0070_q_searches_by_category_department_label.sql @@ -0,0 +1,36 @@ +-- Question(s) concernée(s): +-- • "Nombre de recherches par thématique" + +drop table if exists q_searches_by_category_department_label; + +create table q_searches_by_category_department_label as ( + select + category.label as "category", + "search".department as "department", + ss.label as "label", + count(distinct "search".id) as "count" + from stats_searchview as "search" + left join + stats_searchview_categories as "service" + on "search".id = "service".searchview_id + left join + services_servicecategory as category + on "service".servicecategory_id = category.id + left join + structures_structuremember as member + on "search".user_id = member.user_id + left join mb_structure as structure on member.structure_id = structure.id + left join + structures_structure_national_labels as ssnl + on structure.id = ssnl.structure_id + left join + structures_structurenationallabel as ss + on ssnl.structurenationallabel_id = ss.id + where + "search".is_staff = false + and "search".is_manager = false + group by + category.label, + "search".department, + ss.label +); diff --git a/queries/metabase/03-questions/0080_q_searches_by_monthyear_department_label.sql b/queries/metabase/03-questions/0080_q_searches_by_monthyear_department_label.sql new file mode 100644 index 00000000..3eede3ab --- /dev/null +++ b/queries/metabase/03-questions/0080_q_searches_by_monthyear_department_label.sql @@ -0,0 +1,36 @@ +-- Question(s) concernée(s): +-- • "Evolution du nombre de recherhces - avec typologie" + +drop table if exists q_searches_by_monthyear_department_label; + +create table 
q_searches_by_monthyear_department_label as ( + select + "search".department as "department", + ss.label as "label", + to_char("search".date, 'YYYY-MM') as "month_year", + count(distinct "search".id) as "count" + from stats_searchview as "search" + left join + stats_searchview_categories as "service" + on "search".id = "service".searchview_id + left join + services_servicecategory as category + on "service".servicecategory_id = category.id + left join + structures_structuremember as member + on "search".user_id = member.user_id + left join mb_structure as structure on member.structure_id = structure.id + left join + structures_structure_national_labels as ssnl + on structure.id = ssnl.structure_id + left join + structures_structurenationallabel as ss + on ssnl.structurenationallabel_id = ss.id + where + "search".is_staff = false + and "search".is_manager = false + group by + to_char("search".date, 'YYYY-MM'), + "search".department, + ss.label +); diff --git a/queries/metabase/03-questions/0090_q_searches_on_last_30_days.sql b/queries/metabase/03-questions/0090_q_searches_on_last_30_days.sql new file mode 100644 index 00000000..04fd7a13 --- /dev/null +++ b/queries/metabase/03-questions/0090_q_searches_on_last_30_days.sql @@ -0,0 +1,35 @@ +-- Question(s) concernée(s): +-- • "Recherches sur les 30 derniers jours" + +drop table if exists q_searches_on_last_30_days; + +create table q_searches_on_last_30_days as ( + select + "search".id as "id", + "search".path as "path", + "search".date as "date", + "search".num_results as "num_results", + "search".department as "department", + ss.label as "label" + from stats_searchview as "search" + left join + stats_searchview_categories as category + on "search".id = category.searchview_id + left join + services_servicecategory as thematique + on category.servicecategory_id = thematique.id + left join + structures_structuremember as member + on "search".user_id = member.user_id + left join mb_structure as structure on 
member.structure_id = structure.id + left join + structures_structure_national_labels as ssnl + on structure.id = ssnl.structure_id + left join + structures_structurenationallabel as ss + on ssnl.structurenationallabel_id = ss.id + where + "search".date >= now() - INTERVAL '30 days' + and "search".is_staff = false + and "search".is_manager = false +); diff --git a/queries/metabase/03-questions/0100_q_mobilisations_by_category_department.sql b/queries/metabase/03-questions/0100_q_mobilisations_by_category_department.sql new file mode 100644 index 00000000..ef2a73e4 --- /dev/null +++ b/queries/metabase/03-questions/0100_q_mobilisations_by_category_department.sql @@ -0,0 +1,36 @@ +-- Question(s) concernée(s): +-- • "Nombre de mobilisations par thématique" + +drop table if exists q_mobilisations_by_category_department; + +create table q_mobilisations_by_category_department as ( + select + mobilisation.id as "id", + mobilisation.path as "path", + mobilisation.date as "date", + structure.department as "department", + ss.label as "label", + category.label as "category" + from stats_mobilisationevent as mobilisation + left join + services_service_categories as "service" + on mobilisation.service_id = "service".service_id + left join + services_servicecategory as category + on "service".servicecategory_id = category.id + left join + structures_structuremember as member + on mobilisation.user_id = member.user_id + left join mb_structure as structure on member.structure_id = structure.id + left join + structures_structure_national_labels as ssnl + on structure.id = ssnl.structure_id + left join + structures_structurenationallabel as ss + on ssnl.structurenationallabel_id = ss.id + where + mobilisation.is_staff = false + and mobilisation.is_manager = false + and mobilisation.is_structure_member = false + and mobilisation.is_structure_admin = false +); diff --git a/queries/stats/01-utilisateurs/0010_v_searches_for_user.sql 
b/queries/stats/01-utilisateurs/0010_v_searches_for_user.sql index caf21c4c..11838d73 100644 --- a/queries/stats/01-utilisateurs/0010_v_searches_for_user.sql +++ b/queries/stats/01-utilisateurs/0010_v_searches_for_user.sql @@ -3,7 +3,7 @@ -- non-membre de l'équipe, -- et non-offreur seulement. -drop view v_searches_for_user; +drop view if exists v_searches_for_user; create or replace view v_searches_for_user as select @@ -13,7 +13,7 @@ select date_part('month', date) as mois, date_part('year', date) as annee, count(*) as nb -from stats_searchview +from mb_stats_searchview where user_kind != 'offreur' and not is_manager diff --git a/queries/stats/01-utilisateurs/0011_v_service_views_for_user.sql b/queries/stats/01-utilisateurs/0011_v_service_views_for_user.sql index 0edcd850..15bb1f5e 100644 --- a/queries/stats/01-utilisateurs/0011_v_service_views_for_user.sql +++ b/queries/stats/01-utilisateurs/0011_v_service_views_for_user.sql @@ -4,7 +4,7 @@ -- pas porté par une structure dont l'utilisateur est membre, -- et non-offreur seulement. -drop view v_service_views_for_user; +drop view if exists v_service_views_for_user; create or replace view v_service_views_for_user as select diff --git a/queries/stats/01-utilisateurs/0012_v_structure_views_for_user.sql b/queries/stats/01-utilisateurs/0012_v_structure_views_for_user.sql index 4717f25b..92ef5491 100644 --- a/queries/stats/01-utilisateurs/0012_v_structure_views_for_user.sql +++ b/queries/stats/01-utilisateurs/0012_v_structure_views_for_user.sql @@ -4,7 +4,7 @@ -- pas une structure dont l'utilisateur est membre, -- et non-offreur seulement. 
-drop view v_structure_views_for_user; +drop view if exists v_structure_views_for_user; create or replace view v_structure_views_for_user as select diff --git a/requirements/base.txt b/requirements/base.txt index 7b38e8b9..cd9ce0d0 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -10,15 +10,15 @@ djangorestframework-gis==1.1 djangorestframework==3.15.2 furl==2.1.3 hiredis==3.0.0 -humanize==4.10.0 +humanize==4.11.0 mjml-python==1.3.3 model-bakery==1.19.5 osm_opening_hours==0.1.1 -psycopg[binary]==3.2.1 +psycopg==3.2.3 PyJWT==2.6.0 # conflit avec ggshield sur versions supérieures -redis==5.0.8 +redis==5.1.1 requests==2.32.3 -sentry-sdk==2.13.0 +sentry-sdk==2.14.0 sib-api-v3-sdk==7.6.0 Unidecode==1.3.8 whitenoise==6.7.0 diff --git a/requirements/test.txt b/requirements/test.txt index 6977c1d1..3324bdb1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,8 +4,8 @@ django-environ==0.11.2 django-querycount==0.8.3 djhtml==3.0.6 freezegun==1.5.1 -ruff==0.6.4 -pytest==8.3.2 +ruff==0.6.9 +pytest==8.3.3 pytest-django==4.9.0 sqlfluff==3.1.1 requests-mock==1.12.1 diff --git a/tools/update-metabase-db.sh b/tools/update-metabase-db.sh index d91253ad..c564ce4a 100755 --- a/tools/update-metabase-db.sh +++ b/tools/update-metabase-db.sh @@ -1,16 +1,42 @@ #!/bin/bash +set -e +set -o pipefail + +# Couleurs ANSI +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color (reset) + # Vérification de la présence du endpoint Metabase dans l'environnement if [ -z "$METABASE_DB_URL" ];then echo "Pas de serveur Metabase connu ; export abandonné."; exit 0; fi -# Installe la dernière version de psql +echo -e "${CYAN}→ Installation de la dernière version de \`psql\`${NC}" dbclient-fetcher psql +echo " " -# Installe et exporte les requêtes SQL du dossier `queries` +echo -e "${CYAN}→ Désactive les messages de niveau \"NOTICE\"${NC}" +psql $METABASE_DB_URL -c "SET client_min_messages TO WARNING;" +echo " 
" + +echo -e "${CYAN}→ Installation et export des requêtes SQL du dossier \`queries\`${NC}" +echo -e "${YELLOW} tools/utils/install-sql-scripts.sh queries${NC}" tools/utils/install-sql-scripts.sh queries +echo " " -# Exporte les tables de production restantes vers Metabase +echo -e "${CYAN}→ Export des tables de production restantes vers Metabase${NC}" +echo -e "${YELLOW} tools/utils/export-db-metabase.sh${NC}" tools/utils/export-db-metabase.sh +echo " " -# Synchronise le schéma de la base de données +echo -e "${CYAN}→ Synchronisation du schéma de la base de données dans Metabase${NC}" +echo -e "${YELLOW} tools/utils/sync-metabase-schemas.sh${NC}" tools/utils/sync-metabase-schemas.sh +echo " " + +echo -e "${CYAN}→ Réactive les messages de niveau \"NOTICE\"${NC}" +psql $METABASE_DB_URL -c "SET client_min_messages TO NOTICE;" +echo " " diff --git a/tools/utils/export-db-metabase.sh b/tools/utils/export-db-metabase.sh index 3ce10185..e3883bde 100755 --- a/tools/utils/export-db-metabase.sh +++ b/tools/utils/export-db-metabase.sh @@ -5,5 +5,25 @@ export DEST_DB_URL=$METABASE_DB_URL +# Suppression en cascade et "à la main" de tables +# L'instruction ci-dessous pg_dump $DATABASE_URL ... | psql -q $DEST_DB_URL ne +# supprime pas corectement les tables, ce qui créée des doublons de données. 
+psql $DEST_DB_URL -c " +DO \$\$ +DECLARE + r RECORD; +BEGIN + FOR r IN + SELECT tablename + FROM pg_tables + WHERE tablename LIKE 'structures_%' + OR tablename LIKE 'stats_%' + OR tablename LIKE 'orientations_%' + LOOP + EXECUTE 'DROP TABLE ' || r.tablename || ' CASCADE'; + END LOOP; +END \$\$; +" + # Export des tables vers METABASE pg_dump $DATABASE_URL -O -c --if-exists -t orientations_* -t stats_* -t structures_* -t services_servicesource -t services_bookmark -t services_servicefee -t services_accesscondition -t services_beneficiaryaccessmode -t services_coachorientationmode -t services_concernedpublic -t services_credential -t services_locationkind -t services_requirement -t services_service_access_conditions -t services_service_beneficiaries_access_modes -t services_service_categories -t services_service_coach_orientation_modes -t services_service_concerned_public -t services_service_credentials -t services_service_kinds -t services_service_location_kinds -t services_service_requirements -t services_service_subcategories -t services_servicecategory -t services_servicekind -t services_servicemodificationhistoryitem -t services_servicestatushistoryitem -t services_servicesubcategory -t services_savedsearch -t services_savedsearch_fees -t services_savedsearch_kinds -t services_savedsearch_subcategories | psql -q $DEST_DB_URL diff --git a/tools/utils/install-sql-scripts.sh b/tools/utils/install-sql-scripts.sh index 7d42bce6..7a7c827f 100755 --- a/tools/utils/install-sql-scripts.sh +++ b/tools/utils/install-sql-scripts.sh @@ -6,6 +6,43 @@ export SRC_DB_URL=$DATABASE_URL export DEST_DB_URL=$METABASE_DB_URL +# Note : +# workaround pour permettre de détruire en avance ce qui doit être +# importé sur la base cible. +# Pourquoi ? +# Les ordres DROP TABLE générés par pg_dump ne contiennent *pas* de clause CASCADE. +# => les tables référencées par des vues ne peuvent pas être détruites et +# recréés uniquement par le dump (seulement en détruisant et recréant la base). 
+# Des discussions pour inclure une clause spécifique --drop-cascade dans pg_dump +# ont eu lieu il y quelques années, sans résultat. +# Solution pas très propre, mais au moins fonctionnelle +# le temps d'en trouver une meilleure (ou pas). +function drop_table_or_view_in_cascade_if_exists() { + local tblname=$1 + + # Vérification si l'objet est une table + is_table=$(psql $DEST_DB_URL -qAt -c "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = '$tblname' AND table_schema = 'public');") + + # Vérification si l'objet est une vue + is_view=$(psql $DEST_DB_URL -qAt -c "SELECT EXISTS (SELECT 1 FROM information_schema.views WHERE table_name = '$tblname' AND table_schema = 'public');") + + if [ "$is_table" = "t" ]; then + if psql $DEST_DB_URL -q -c "DROP TABLE $tblname CASCADE;"; then + echo "La table '$tblname' a été supprimée." + else + echo "Erreur lors de la suppression de la table '$tblname'." + fi + elif [ "$is_view" = "t" ]; then + if psql $DEST_DB_URL -q -c "DROP VIEW $tblname CASCADE;"; then + echo "La vue '$tblname' a été supprimée." + else + echo "Erreur lors de la suppression de la vue '$tblname'." + fi + else + echo "L'objet '$tblname' n'a pas été trouvé (en tant que table ou vue), aucune suppression effectuée." + fi +} + function walkDirs() { local d="$1" local tables_stmt='' @@ -14,50 +51,37 @@ function walkDirs() { echo "> Dossier '$f'" walkDirs "$f" elif [ "${f##*.}" = "sql" ]; then - echo "Exécution de '$f' sur la DB source" + echo -e "🔄 Exécution de '$f' sur la DB source" psql $SRC_DB_URL -q -f "$f" - # - # Nommage des fichiers : (/d+_)nom_de_table(.sql) + + # Nommage des fichiers : (/d+_)nom_de_table(.sql) tblname=$(basename "$f" .sql) tblname=$(echo $tblname | cut -d"_" -f2-) echo "Ajout de '$tblname' pour le dump vers DB destination" tables_stmt+="-t $tblname " - - # Note : - # workaround pour permettre de détruire en avance ce qui doit être - # importé sur la base cible. - # Pourquoi ? 
- # Les ordres DROP TABLE générés par pg_dump ne contiennent *pas* de clause CASCADE. - # => les tables référencées par des vues ne peuvent pas être détruites et - # recréés uniquement par le dump (seulement en détruisant et recréant la base). - # Des discussions pour inclure une clause spécifique --drop-cascade dans pg_dump - # ont eu lieu il y quelques années, sans résultat. - # Solution pas très propre, mais au moins fonctionnelle - # le temps d'en trouver une meilleure (ou pas). + echo "Suppression de '$tblname' sur la DB de destination" - psql -q -c "DROP TABLE $tblname CASCADE;" $DEST_DB_URL - psql -q -c "DROP VIEW $tblname CASCADE;" $DEST_DB_URL - - echo "--" + drop_table_or_view_in_cascade_if_exists "$tblname" + echo " " fi done if [ -n "$tables_stmt" ]; then echo "Export du dump vers la DB de destination" pg_dump $SRC_DB_URL -O -c $tables_stmt | psql -q $DEST_DB_URL echo "Dump exporté" + echo " " fi } if [ $# -ne 1 ]; then - echo "Usage: $0 dir_path" + echo "❌ Usage: $0 dir_path" exit 1 fi if [ ! -d "$1" ]; then - echo "Erreur: $1 n'est pas un répertoire valide" + echo "❌ Erreur: $1 n'est pas un répertoire valide" exit 1 fi walkDirs "$1" -echo "--" -echo "Terminé!" +echo " " diff --git a/tools/utils/sync-metabase-schemas.sh b/tools/utils/sync-metabase-schemas.sh index ed61250e..d3b1787b 100755 --- a/tools/utils/sync-metabase-schemas.sh +++ b/tools/utils/sync-metabase-schemas.sh @@ -10,9 +10,6 @@ # voir dans l'espace administrateur, rubrique : 'Bases de données' # l'ID est affiché dans l'URL - -echo "Synchronisation des schémas et champs Metabase" - if [ -z ${METABASE_API_URL} ]; then echo " > l'URL de l'API Metabase n'est pas défini" exit 1