From 907c66975b6bf7647ddc05fdfaa42db84e22e7c4 Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Fri, 13 Dec 2024 14:31:00 +0100 Subject: [PATCH 01/10] Created api/v1/production-locations/{os_id}/ PATCH endpoint --- src/django/api/constants.py | 1 + .../dtos/create_moderation_event_dto.py | 5 +- .../creation/moderation_event_creator.py | 5 +- .../api/views/v1/production_locations.py | 70 +++++++++++++++++-- 4 files changed, 72 insertions(+), 9 deletions(-) diff --git a/src/django/api/constants.py b/src/django/api/constants.py index 96170b2f8..95407ee28 100644 --- a/src/django/api/constants.py +++ b/src/django/api/constants.py @@ -226,6 +226,7 @@ class APIV1LocationContributionErrorMessages: 'address. This may be due to incorrect, incomplete, or ambiguous ' 'information. Please verify and try again.' ) + LOCATION_NOT_FOUND = ('The location with the given ID was not found.') @staticmethod def invalid_data_type_error(data_type: str) -> str: diff --git a/src/django/api/moderation_event_actions/creation/dtos/create_moderation_event_dto.py b/src/django/api/moderation_event_actions/creation/dtos/create_moderation_event_dto.py index 21c9f7173..cfbc9b9a6 100644 --- a/src/django/api/moderation_event_actions/creation/dtos/create_moderation_event_dto.py +++ b/src/django/api/moderation_event_actions/creation/dtos/create_moderation_event_dto.py @@ -4,13 +4,16 @@ from rest_framework import status from api.models.moderation_event import ModerationEvent +from api.models.contributor.contributor import Contributor +from api.models.facility.facility import Facility @dataclass class CreateModerationEventDTO: - contributor_id: int + contributor: Contributor raw_data: Dict request_type: str + os: Facility = None cleaned_data: Dict = field(default_factory=dict) source: str = '' geocode_result: Dict = field(default_factory=dict) diff --git a/src/django/api/moderation_event_actions/creation/moderation_event_creator.py 
b/src/django/api/moderation_event_actions/creation/moderation_event_creator.py index e56ca5eb5..4c6d02c9c 100644 --- a/src/django/api/moderation_event_actions/creation/moderation_event_creator.py +++ b/src/django/api/moderation_event_actions/creation/moderation_event_creator.py @@ -20,12 +20,13 @@ def perform_event_creation( return event_dto event_dto.moderation_event = ModerationEvent.objects.create( - contributor=processed_event.contributor_id, + contributor=processed_event.contributor, request_type=processed_event.request_type, raw_data=processed_event.raw_data, cleaned_data=processed_event.cleaned_data, geocode_result=processed_event.geocode_result, - source=processed_event.source + source=processed_event.source, + os=processed_event.os ) return event_dto diff --git a/src/django/api/views/v1/production_locations.py b/src/django/api/views/v1/production_locations.py index c8aa76969..3434305b5 100644 --- a/src/django/api/views/v1/production_locations.py +++ b/src/django/api/views/v1/production_locations.py @@ -28,6 +28,7 @@ from api.moderation_event_actions.creation.dtos.create_moderation_event_dto \ import CreateModerationEventDTO from api.models.moderation_event import ModerationEvent +from api.models.facility.facility import Facility from api.throttles import DataUploadThrottle from api.constants import ( APIV1CommonErrorMessages, @@ -69,12 +70,14 @@ def __get_action_permissions(self) -> List: ''' Returns the list of permissions specific to the current action. ''' - if self.action == 'create': + if (self.action == 'create' + or self.action == 'partial_update'): return [IsRegisteredAndConfirmed] return [] def get_throttles(self): - if self.action == 'create': + if (self.action == 'create' + or self.action == 'partial_update'): return [DataUploadThrottle()] # Call the parent method to use the default throttling setup in the @@ -85,9 +88,10 @@ def get_parsers(self): ''' Override the default parser classes for specific actions. 
''' - if self.request.method == 'POST': - # Use JSONParser for the 'create' action to restrict all media - # types except 'application/json'. + if (self.request.method == 'POST' + or self.request.method == 'PATCH'): + # Use JSONParser for the 'create' and 'partial_update' actions to + # restrict all media types except 'application/json'. return [JSONParser()] # Call the parent method to use the default parsers setup in the @@ -165,7 +169,7 @@ def create(self, request): location_contribution_strategy ) event_dto = CreateModerationEventDTO( - contributor_id=request.user.contributor, + contributor=request.user.contributor, raw_data=request.data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -184,3 +188,57 @@ def create(self, request): }, status=result.status_code ) + + @transaction.atomic + def partial_update(self, request, pk=None): + if switch_is_active('disable_list_uploading'): + raise ServiceUnavailableException( + APIV1CommonErrorMessages.MAINTENANCE_MODE + ) + if not Facility.objects.filter(id=pk).exists(): + specific_error = APIV1LocationContributionErrorMessages \ + .LOCATION_NOT_FOUND + return Response( + {'detail': specific_error}, + status=status.HTTP_404_NOT_FOUND + ) + if not isinstance(request.data, dict): + data_type = type(request.data).__name__ + specific_error = APIV1LocationContributionErrorMessages \ + .invalid_data_type_error(data_type) + return Response( + { + 'detail': APIV1CommonErrorMessages.COMMON_REQ_BODY_ERROR, + 'errors': [{ + 'field': NON_FIELD_ERRORS_KEY, + 'detail': specific_error + }] + }, + status=status.HTTP_400_BAD_REQUEST + ) + + location_contribution_strategy = LocationContribution() + moderation_event_creator = ModerationEventCreator( + location_contribution_strategy + ) + event_dto = CreateModerationEventDTO( + contributor=request.user.contributor, + os=Facility.objects.get(id=pk), + raw_data=request.data, + request_type=ModerationEvent.RequestType.UPDATE.value + ) + result = 
moderation_event_creator.perform_event_creation(event_dto) + + if result.errors: + return Response( + result.errors, + status=result.status_code) + + return Response( + { + 'moderation_id': result.moderation_event.uuid, + 'moderation_status': result.moderation_event.status, + 'created_at': result.moderation_event.created_at + }, + status=result.status_code + ) From e19e224b4d6247cb7cd14ba3aa0bf13d67ea3d9f Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Mon, 16 Dec 2024 14:48:43 +0100 Subject: [PATCH 02/10] Fix moderation events indexing and tests --- .../test_location_contribution_strategy.py | 24 +++++++++---------- src/logstash/indexes/moderation_events.json | 1 + 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/django/api/tests/test_location_contribution_strategy.py b/src/django/api/tests/test_location_contribution_strategy.py index 8fdde3e8b..121a8ecd5 100644 --- a/src/django/api/tests/test_location_contribution_strategy.py +++ b/src/django/api/tests/test_location_contribution_strategy.py @@ -70,7 +70,7 @@ def test_source_set_as_api_regardless_of_whether_passed(self, mock_get): self.assertNotIn('source', self.common_valid_input_data) event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=self.common_valid_input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -137,7 +137,7 @@ def test_invalid_source_value_cannot_be_accepted(self, mock_get): # Check the length validation. event_dto_1 = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=invalid_input_data_1, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -150,7 +150,7 @@ def test_invalid_source_value_cannot_be_accepted(self, mock_get): # Check validation of accepted values. 
event_dto_2 = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=invalid_input_data_2, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -163,7 +163,7 @@ def test_invalid_source_value_cannot_be_accepted(self, mock_get): # Check the accepted data type validation for the source field. event_dto_3 = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=invalid_input_data_3, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -189,7 +189,7 @@ def test_mapping_of_unsupported_fields_by_contricleaner_with_valid_data( } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -256,7 +256,7 @@ def test_mapping_of_unsupported_fields_by_contricleaner_with_invalid_data( } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -289,7 +289,7 @@ def test_handling_of_cc_list_level_errors(self): } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -332,7 +332,7 @@ def test_handling_of_cc_handler_not_set_exception(self, mock_process_data): } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -368,7 +368,7 @@ def test_handling_geocoded_no_results_error(self, mock_get): } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -397,7 +397,7 @@ def test_handling_geocoding_internal_error(self, mock_geocode_address): } event_dto = 
CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -454,7 +454,7 @@ def test_moderation_event_is_created_with_coordinates_properly(self): } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) @@ -629,7 +629,7 @@ def test_moderation_event_is_created_without_coordinates_properly( } event_dto = CreateModerationEventDTO( - contributor_id=self.contributor, + contributor=self.contributor, raw_data=input_data, request_type=ModerationEvent.RequestType.CREATE.value ) diff --git a/src/logstash/indexes/moderation_events.json b/src/logstash/indexes/moderation_events.json index 35f9a1d95..518e87b1f 100644 --- a/src/logstash/indexes/moderation_events.json +++ b/src/logstash/indexes/moderation_events.json @@ -8,6 +8,7 @@ "number_of_replicas": 1 }, "mappings": { + "dynamic": false, "properties": { "moderation_id": { "type": "keyword" From fb18164cb61f8191b44974c28f6fc5ca27a32aba Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Mon, 16 Dec 2024 17:55:40 +0100 Subject: [PATCH 03/10] Write tests --- src/django/api/constants.py | 2 +- .../test_location_contribution_strategy.py | 153 +++++++- ...est_production_locations_partial_update.py | 336 ++++++++++++++++++ .../api/views/v1/production_locations.py | 1 + 4 files changed, 489 insertions(+), 3 deletions(-) create mode 100644 src/django/api/tests/test_production_locations_partial_update.py diff --git a/src/django/api/constants.py b/src/django/api/constants.py index 95407ee28..7b191f542 100644 --- a/src/django/api/constants.py +++ b/src/django/api/constants.py @@ -226,7 +226,7 @@ class APIV1LocationContributionErrorMessages: 'address. This may be due to incorrect, incomplete, or ambiguous ' 'information. Please verify and try again.' 
) - LOCATION_NOT_FOUND = ('The location with the given ID was not found.') + LOCATION_NOT_FOUND = ('The location with the given id was not found.') @staticmethod def invalid_data_type_error(data_type: str) -> str: diff --git a/src/django/api/tests/test_location_contribution_strategy.py b/src/django/api/tests/test_location_contribution_strategy.py index 121a8ecd5..116b5455a 100644 --- a/src/django/api/tests/test_location_contribution_strategy.py +++ b/src/django/api/tests/test_location_contribution_strategy.py @@ -4,10 +4,16 @@ from unittest.mock import Mock, patch from rest_framework.test import APITestCase +from allauth.account.models import EmailAddress +from django.contrib.gis.geos import Point from api.models.moderation_event import ModerationEvent from api.models.contributor.contributor import Contributor from api.models.user import User +from api.models.facility.facility_list import FacilityList +from api.models.facility.facility_list_item import FacilityListItem +from api.models.facility.facility import Facility +from api.models.source import Source from api.tests.test_data import ( geocoding_data, geocoding_no_results @@ -408,8 +414,7 @@ def test_handling_geocoding_internal_error(self, mock_geocode_address): self.assertIsNone(result.moderation_event) self.assertEqual(result.errors, expected_error_result) - def test_moderation_event_is_created_with_coordinates_properly(self): - + def test_moderation_event_creation_with_coordinates_for_create(self): input_data = { 'source': 'SLC', 'name': 'Blue Horizon Facility', @@ -499,9 +504,153 @@ def test_moderation_event_is_created_with_coordinates_properly(self): # was provided during the creation of the moderation event. self.assertIsNone(moderation_event.os) + def test_moderation_event_creation_with_valid_data_for_update(self): + # Create a new user and contributor for the production location that + # already exists in the system while processing the location + # contribution. 
+ existing_location_user_email = 'test2@example.com' + existing_location_user_password = '4567test' + existing_location_user = User.objects.create( + email=existing_location_user_email + ) + existing_location_user.set_password( + existing_location_user_password + ) + existing_location_user.save() + EmailAddress.objects.create( + user=existing_location_user, + email=existing_location_user_email, + verified=True, + primary=True + ) + + existing_location_contributor = Contributor.objects.create( + admin=existing_location_user, + name='test contributor 2', + contrib_type=Contributor.OTHER_CONTRIB_TYPE, + ) + + # Create the production location to ensure the existing location is in + # place before processing the contribution. + list = FacilityList.objects.create( + header='header', file_name='one', name='New List Test' + ) + source = Source.objects.create( + source_type=Source.LIST, + facility_list=list, + contributor=existing_location_contributor + ) + list_item = FacilityListItem.objects.create( + name='Gamma Tech Manufacturing Plant', + address='1574 Quantum Avenue, Building 4B, Technopolis', + country_code='YT', + sector=['Apparel'], + row_index=1, + status=FacilityListItem.CONFIRMED_MATCH, + source=source + ) + production_location = Facility.objects.create( + name=list_item.name, + address=list_item.address, + country_code=list_item.country_code, + location=Point(0, 0), + created_from=list_item + ) + + input_data = { + 'source': 'SLC', + 'name': 'Blue Horizon Facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'country': 'US', + 'sector': ['Apparel', 'Equipment'], + 'coordinates': { + 'lat': 51.078389, + 'lng': 16.978477 + }, + 'product_type': ['Random product type'] + } + + expected_raw_data = deepcopy(input_data) + expected_cleaned_data = { + 'raw_json': { + 'lat': 51.078389, + 'lng': 16.978477, + 'name': 'Blue Horizon Facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'country': 'US', + 'sector': ['Apparel', 
'Equipment'], + 'product_type': ['Random product type'] + }, + 'name': 'Blue Horizon Facility', + 'clean_name': 'blue horizon facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'clean_address': '990 spring garden st. philadelphia pa 19123', + 'country_code': 'US', + 'sector': ['Unspecified'], + 'fields': { + 'product_type': [ + 'Apparel', + 'Equipment', + 'Random product type' + ], + 'lat': 51.078389, + 'lng': 16.978477, + 'country': 'US' + }, + 'errors': [] + } + + event_dto = CreateModerationEventDTO( + contributor=self.contributor, + raw_data=input_data, + request_type=ModerationEvent.RequestType.UPDATE.value, + os=production_location + ) + result = self.moderation_event_creator.perform_event_creation( + event_dto + ) + self.assertEqual(result.status_code, 202) + + moderation_event = result.moderation_event + + self.assertIsNotNone(moderation_event) + self.assertTrue(self.is_valid_uuid(moderation_event.uuid)) + + stringified_created_at = moderation_event.created_at.strftime( + '%Y-%m-%dT%H:%M:%S.%f' + ) + 'Z' + self.assertTrue( + self.is_valid_date_with_microseconds(stringified_created_at) + ) + + stringified_updated_at = moderation_event.updated_at.strftime( + '%Y-%m-%dT%H:%M:%S.%f' + ) + 'Z' + self.assertTrue( + self.is_valid_date_with_microseconds(stringified_updated_at) + ) + + self.assertIsNone(moderation_event.status_change_date) + self.assertEqual(moderation_event.request_type, 'UPDATE') + self.assertEqual(moderation_event.raw_data, expected_raw_data) + self.assertEqual(moderation_event.cleaned_data, expected_cleaned_data) + # The geocode result should be empty because the coordinates provided + # did not trigger the Google API geocoding. + self.assertEqual(moderation_event.geocode_result, {}) + self.assertEqual(moderation_event.status, 'PENDING') + self.assertEqual(moderation_event.source, 'SLC') + # The claim field should be None because no claim relation was + # provided during the creation of the moderation event. 
+ self.assertIsNone(moderation_event.claim) + self.assertEqual(moderation_event.contributor, self.contributor) + self.assertEqual(moderation_event.os.id, production_location.id) + @patch('api.geocoding.requests.get') def test_moderation_event_is_created_without_coordinates_properly( self, mock_get): + # This test focuses on testing the case when the coordinates were not + # passed, and geocoding should be performed for the particular + # contribution. mock_get.return_value = Mock(ok=True, status_code=200) mock_get.return_value.json.return_value = geocoding_data diff --git a/src/django/api/tests/test_production_locations_partial_update.py b/src/django/api/tests/test_production_locations_partial_update.py new file mode 100644 index 000000000..0c9b09762 --- /dev/null +++ b/src/django/api/tests/test_production_locations_partial_update.py @@ -0,0 +1,336 @@ +import json + +from unittest.mock import Mock, patch +from rest_framework.test import APITestCase +from django.urls import reverse +from allauth.account.models import EmailAddress +from waffle.testutils import override_switch +from django.contrib.gis.geos import Point + +from api.models.moderation_event import ModerationEvent +from api.models.contributor.contributor import Contributor +from api.models.facility.facility_list import FacilityList +from api.models.facility.facility_list_item import FacilityListItem +from api.models.source import Source +from api.models.user import User +from api.models.facility.facility import Facility +from api.views.v1.url_names import URLNames +from api.tests.test_data import geocoding_data + + +class TestProductionLocationsPartialUpdate(APITestCase): + def setUp(self): + # Create a valid Contributor specifically for this test. 
+ user_email = 'test@example.com' + user_password = 'example123' + self.user = User.objects.create(email=user_email) + self.user.set_password(user_password) + self.user.save() + + EmailAddress.objects.create( + user=self.user, email=user_email, verified=True, primary=True + ) + + contributor = Contributor.objects.create( + admin=self.user, + name='test contributor 1', + contrib_type=Contributor.OTHER_CONTRIB_TYPE + ) + + self.login(user_email, user_password) + + # Create a valid Facility entry specifically for this test, to be used + # for making PATCH requests for it. + list = FacilityList.objects.create( + header='header', file_name='one', name='New List Test' + ) + source = Source.objects.create( + source_type=Source.LIST, + facility_list=list, + contributor=contributor + ) + list_item = FacilityListItem.objects.create( + name='Gamma Tech Manufacturing Plant', + address='1574 Quantum Avenue, Building 4B, Technopolis', + country_code='YT', + sector=['Apparel'], + row_index=1, + status=FacilityListItem.CONFIRMED_MATCH, + source=source + ) + self.production_location = Facility.objects.create( + name=list_item.name, + address=list_item.address, + country_code=list_item.country_code, + location=Point(0, 0), + created_from=list_item + ) + + self.url = reverse( + URLNames.PRODUCTION_LOCATIONS + '-detail', + args=[self.production_location.id]) + self.common_valid_req_body = json.dumps({ + 'name': 'Blue Horizon Facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'country': 'US' + }) + + def login(self, email: str, password: str) -> None: + self.client.logout() + self.client.login(email=email, password=password) + + def test_only_registered_and_confirmed_has_access(self): + expected_response_body = { + 'detail': ( + 'User must be registered and have confirmed their email to ' + 'access.' + ) + } + + saved_email_address = EmailAddress.objects.get_primary(self.user) + # Purposely make the email address unverified to trigger a permission + # error. 
+ saved_email_address.verified = False + saved_email_address.save() + + response = self.client.patch( + self.url, + self.common_valid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 403) + self.assertEqual( + json.loads(response.content), + expected_response_body + ) + + @patch('api.geocoding.requests.get') + def test_default_throttling_is_applied(self, mock_get): + mock_get.return_value = Mock(ok=True, status_code=200) + mock_get.return_value.json.return_value = geocoding_data + + # Simulate 30 requests. + for _ in range(30): + response = self.client.patch( + self.url, + self.common_valid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 202) + + response_body_dict = json.loads(response.content) + response_moderation_id = response_body_dict.get('moderation_id') + moderation_event = ModerationEvent.objects.get( + pk=response_moderation_id + ) + stringified_created_at = moderation_event.created_at.strftime( + '%Y-%m-%dT%H:%M:%S.%f' + ) + 'Z' + + self.assertEqual( + response_body_dict.get('moderation_status'), + 'PENDING' + ) + self.assertEqual( + response_body_dict.get('created_at'), + stringified_created_at + ) + self.assertEqual( + response_body_dict.get('os_id'), + self.production_location.id + ) + self.assertEqual(len(response_body_dict), 4) + + # Now simulate the 31st request, which should be throttled. + throttled_response = self.client.patch( + self.url, + self.common_valid_req_body, + content_type='application/json' + ) + throttled_response_body_dict = json.loads(throttled_response.content) + self.assertEqual(throttled_response.status_code, 429) + self.assertEqual(len(throttled_response_body_dict), 1) + + @override_switch('disable_list_uploading', active=True) + def test_client_cannot_patch_when_upload_is_blocked(self): + expected_error = ( + 'Open Supply Hub is undergoing maintenance and not accepting new ' + 'data at the moment. Please try again in a few minutes.' 
+ ) + + response = self.client.patch( + self.url, + self.common_valid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 503) + + response_body_dict = json.loads(response.content) + error = response_body_dict.get('detail') + self.assertEqual(error, expected_error) + self.assertEqual(len(response_body_dict), 1) + + def test_location_not_found(self): + expected_error = 'The location with the given id was not found.' + url_with_nonexistent_id = reverse( + URLNames.PRODUCTION_LOCATIONS + '-detail', + args=['TT11111111111TT'] + ) + + response = self.client.patch( + url_with_nonexistent_id, + self.common_valid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 404) + + response_body_dict = json.loads(response.content) + error = response_body_dict.get('detail') + self.assertEqual(error, expected_error) + self.assertEqual(len(response_body_dict), 1) + + def test_endpoint_supports_only_dictionary_structure(self): + expected_general_error = ( + 'The request body is invalid.' + ) + expected_specific_error = ( + 'Invalid data. Expected a dictionary (object), but got list.' 
+ ) + expected_error_field = 'non_field_errors' + + response = self.client.patch( + self.url, + [1, 2, 3], + content_type='application/json' + ) + self.assertEqual(response.status_code, 400) + + response_body_dict = json.loads(response.content) + self.assertEqual(len(response_body_dict), 2) + + general_error = response_body_dict['detail'] + errors_list_length = len(response_body_dict['errors']) + specific_error = response_body_dict['errors'][0]['detail'] + error_field = response_body_dict['errors'][0]['field'] + self.assertEqual(general_error, expected_general_error) + self.assertEqual(errors_list_length, 1) + self.assertEqual(specific_error, expected_specific_error) + self.assertEqual(error_field, expected_error_field) + + @patch('api.geocoding.requests.get') + def test_moderation_event_created_with_valid_data( + self, + mock_get): + mock_get.return_value = Mock(ok=True, status_code=200) + mock_get.return_value.json.return_value = geocoding_data + + valid_req_body = json.dumps({ + 'source': 'SLC', + 'name': 'Blue Horizon Facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'country': 'US', + 'location_type': 'Coating', + 'coordinates': { + 'lat': 51.078389, + 'lng': 16.978477 + } + }) + + response = self.client.patch( + self.url, + valid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 202) + + response_body_dict = json.loads(response.content) + response_moderation_id = response_body_dict.get('moderation_id') + moderation_event = ModerationEvent.objects.get( + pk=response_moderation_id + ) + stringified_created_at = moderation_event.created_at.strftime( + '%Y-%m-%dT%H:%M:%S.%f' + ) + 'Z' + + # Check the response. 
+ self.assertEqual( + response_body_dict.get('moderation_status'), + 'PENDING' + ) + self.assertEqual( + response_body_dict.get('created_at'), + stringified_created_at + ) + self.assertEqual( + response_moderation_id, + str(moderation_event.uuid) + ) + self.assertEqual( + response_body_dict.get('os_id'), + self.production_location.id + ) + self.assertEqual(len(response_body_dict), 4) + + @patch('api.geocoding.requests.get') + def test_moderation_event_not_created_with_invalid_data( + self, + mock_get): + mock_get.return_value = Mock(ok=True, status_code=200) + mock_get.return_value.json.return_value = geocoding_data + + expected_response_body = { + 'detail': 'The request body is invalid.', + 'errors': [ + { + 'field': 'sector', + 'detail': ('Expected value for sector to be a string or a ' + "list of strings but got {'some_key': 1135}.") + }, + { + 'field': 'location_type', + 'detail': ( + 'Expected value for location_type to be a ' + 'string or a list of strings but got ' + "{'some_key': 1135}." + ) + } + ] + } + initial_moderation_event_count = ModerationEvent.objects.count() + + invalid_req_body = json.dumps({ + 'source': 'API', + 'name': 'Blue Horizon Facility', + 'address': '990 Spring Garden St., Philadelphia PA 19123', + 'country': 'US', + 'sector': {'some_key': 1135}, + 'parent_company': 'string', + 'product_type': [ + 'string' + ], + 'location_type': {'some_key': 1135}, + 'processing_type': [ + 'string' + ], + 'number_of_workers': { + 'min': 0, + 'max': 0 + }, + 'coordinates': { + 'lat': 10, + 'lng': 20 + } + }) + + response = self.client.patch( + self.url, + invalid_req_body, + content_type='application/json' + ) + self.assertEqual(response.status_code, 422) + + response_body_dict = json.loads(response.content) + self.assertEqual(response_body_dict, expected_response_body) + # Ensure that no ModerationEvent record has been created. 
+ self.assertEqual(ModerationEvent.objects.count(), + initial_moderation_event_count) diff --git a/src/django/api/views/v1/production_locations.py b/src/django/api/views/v1/production_locations.py index 3434305b5..71f3f7ae0 100644 --- a/src/django/api/views/v1/production_locations.py +++ b/src/django/api/views/v1/production_locations.py @@ -236,6 +236,7 @@ def partial_update(self, request, pk=None): return Response( { + 'os_id': result.os.id, 'moderation_id': result.moderation_event.uuid, 'moderation_status': result.moderation_event.status, 'created_at': result.moderation_event.created_at From ebf421b1b01972a80758d4fda0f098374032f44d Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Mon, 16 Dec 2024 18:25:22 +0100 Subject: [PATCH 04/10] Updated release notes --- deployment/clear_opensearch/clear_opensearch.sh.tpl | 3 --- doc/release/RELEASE-NOTES.md | 5 ++++- .../api/tests/test_production_locations_partial_update.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/deployment/clear_opensearch/clear_opensearch.sh.tpl b/deployment/clear_opensearch/clear_opensearch.sh.tpl index 817f141d1..eb2266fe4 100644 --- a/deployment/clear_opensearch/clear_opensearch.sh.tpl +++ b/deployment/clear_opensearch/clear_opensearch.sh.tpl @@ -12,15 +12,12 @@ # This can help speed up the deployment process of new changes. 
echo -e "\nDelete the custom OpenSearch indexes\n" -curl -X DELETE https://$OPENSEARCH_DOMAIN/production-locations --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" curl -X DELETE https://$OPENSEARCH_DOMAIN/moderation-events --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" echo -e "\nDelete the custom OpenSearch templates\n" -curl -X DELETE https://$OPENSEARCH_DOMAIN/_index_template/production_locations_template --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" curl -X DELETE https://$OPENSEARCH_DOMAIN/_index_template/moderation_events_template --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" echo -e "\nRemove the JDBC input lock files from the EFS storage connected to Logstash\n" sudo mount -t efs -o tls,accesspoint=$EFS_AP_ID $EFS_ID:/ /mnt -sudo rm /mnt/production_locations_jdbc_last_run sudo rm /mnt/moderation_events_jdbc_last_run sudo umount /mnt diff --git a/doc/release/RELEASE-NOTES.md b/doc/release/RELEASE-NOTES.md index d71371e48..5fb2de9c2 100644 --- a/doc/release/RELEASE-NOTES.md +++ b/doc/release/RELEASE-NOTES.md @@ -15,6 +15,8 @@ This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html #### Scheme changes ### Code/API changes +* [OSDEV-1336](https://opensupplyhub.atlassian.net/browse/OSDEV-1336) - Introduced a new PATCH `/api/v1/production-locations/{os_id}/` endpoint based on the API v1 specification. This endpoint allows the creation of a new moderation event for updating the production location with the given details. Basically, the endpoint can be used to contribute to an existing location. +* [OSDEV-1336](https://opensupplyhub.atlassian.net/browse/OSDEV-1336) - Dynamic mapping for the new fields in the `moderation-events` index has been disabled for those that don't have an explicit mapping defined. 
This change helps avoid indexing conflicts, such as when a field is initially indexed with one data type (e.g., long), but later an entry with a different data type for the same field is indexed, causing the entire entry to fail indexing. After this change, fields with an explicit mapping will be indexed, while other fields will not be indexed or searchable, but will still be displayed in the document. The `clear_opensearch.sh.tpl` script has been edited to clear only resources related to the `moderation-events` index, so there will be no need to wait for the production-locations index to be refilled during deployment, as it will not be deleted. ### Architecture/Environment changes @@ -28,6 +30,7 @@ This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html * Ensure that the following commands are included in the `post_deployment` command: * `migrate` * `reindex_database` +* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `moderation-events` index after disabling dynamic mapping for the new fields that don't have an explicit mapping defined. Note that the `clear_opensearch.sh.tpl` script has been edited to clear only resources related to the `moderation-events` index, so there will be no need to wait for the `production-locations` index to be refilled, as it will not be deleted. ## Release 1.26.0 @@ -105,7 +108,7 @@ This issue has been fixed by adding additional requests to delete the appropriat * Ensure that the following commands are included in the `post_deployment` command: * `migrate` * `reindex_database` -* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `production-locations` and `moderation-events` indexes after fixing the process of clearing the custom OpenSearch indexes. 
+* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `production-locations` and `moderation-events` indexes after fixing the process of clearing the custom OpenSearch indexes. ## Release 1.25.0 diff --git a/src/django/api/tests/test_production_locations_partial_update.py b/src/django/api/tests/test_production_locations_partial_update.py index 0c9b09762..fe4443757 100644 --- a/src/django/api/tests/test_production_locations_partial_update.py +++ b/src/django/api/tests/test_production_locations_partial_update.py @@ -40,7 +40,7 @@ def setUp(self): self.login(user_email, user_password) # Create a valid Facility entry specifically for this test, to be used - # for making PATCH requests for it. + # for making PATCH requests for this production location. list = FacilityList.objects.create( header='header', file_name='one', name='New List Test' ) From 0563e3e44da1c89ed90089772bafc9eaca82d05d Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Tue, 17 Dec 2024 12:31:27 +0100 Subject: [PATCH 05/10] Trigger CI From 8311cf704b133930128122c7afdbf07bb9e10242 Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Tue, 17 Dec 2024 12:31:54 +0100 Subject: [PATCH 06/10] Trigger CI From e0d79c852ab57500c92124f6d6cc1c7a9e48609e Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Tue, 17 Dec 2024 12:47:40 +0100 Subject: [PATCH 07/10] Trigger CI From 6c14f761e27e43a2b742b2830fc634d6b4b6fa8d Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Tue, 17 Dec 2024 12:49:20 +0100 Subject: [PATCH 08/10] Trigger CI From b523b70ab604477112b81ccbd8e664386a2fa4ca Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Wed, 18 Dec 2024 09:15:43 +0100 Subject: [PATCH 09/10] Reverted changes in clear_opensearch.sh.tpl --- deployment/clear_opensearch/clear_opensearch.sh.tpl | 8 +++----- doc/release/RELEASE-NOTES.md | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git 
a/deployment/clear_opensearch/clear_opensearch.sh.tpl b/deployment/clear_opensearch/clear_opensearch.sh.tpl index eb2266fe4..b2ba93770 100644 --- a/deployment/clear_opensearch/clear_opensearch.sh.tpl +++ b/deployment/clear_opensearch/clear_opensearch.sh.tpl @@ -5,19 +5,17 @@ # new index mappings or to refresh the OpenSearch cluster after # restarting Logstash, with the lock files deleted from EFS # storage for each pipeline. -# -# The script can be modified to delete only specific templates, -# indexes, and Logstash pipeline lock files, allowing for a more -# targeted refresh without affecting the entire OpenSearch cluster. -# This can help speed up the deployment process of new changes. echo -e "\nDelete the custom OpenSearch indexes\n" +curl -X DELETE https://$OPENSEARCH_DOMAIN/production-locations --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" curl -X DELETE https://$OPENSEARCH_DOMAIN/moderation-events --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" echo -e "\nDelete the custom OpenSearch templates\n" +curl -X DELETE https://$OPENSEARCH_DOMAIN/_index_template/production_locations_template --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" curl -X DELETE https://$OPENSEARCH_DOMAIN/_index_template/moderation_events_template --aws-sigv4 "aws:amz:eu-west-1:es" --user "$AWS_ACCESS_KEY_ID:$AWS_SECRET_ACCESS_KEY" echo -e "\nRemove the JDBC input lock files from the EFS storage connected to Logstash\n" sudo mount -t efs -o tls,accesspoint=$EFS_AP_ID $EFS_ID:/ /mnt +sudo rm /mnt/production_locations_jdbc_last_run sudo rm /mnt/moderation_events_jdbc_last_run sudo umount /mnt diff --git a/doc/release/RELEASE-NOTES.md b/doc/release/RELEASE-NOTES.md index def869fbb..8715ec77f 100644 --- a/doc/release/RELEASE-NOTES.md +++ b/doc/release/RELEASE-NOTES.md @@ -16,7 +16,7 @@ This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html ### Code/API 
changes * [OSDEV-1336](https://opensupplyhub.atlassian.net/browse/OSDEV-1336) - Introduced a new PATCH `/api/v1/production-locations/{os_id}/` endpoint based on the API v1 specification. This endpoint allows the creation of a new moderation event for updating the production location with the given details. Basically, the endpoint can be used to contribute to an existing location. -* [OSDEV-1336](https://opensupplyhub.atlassian.net/browse/OSDEV-1336) - Dynamic mapping for the new fields in the `moderation-events` index has been disabled for those that don't have an explicit mapping defined. This change helps avoid indexing conflicts, such as when a field is initially indexed with one data type (e.g., long), but later an entry with a different data type for the same field is indexed, causing the entire entry to fail indexing. After this change, fields with an explicit mapping will be indexed, while other fields will not be indexed or searchable, but will still be displayed in the document. The `clear_opensearch.sh.tpl` script has been edited to clear only resources related to the `moderation-events` index, so there will be no need to wait for the production-locations index to be refilled during deployment, as it will not be deleted. +* [OSDEV-1336](https://opensupplyhub.atlassian.net/browse/OSDEV-1336) - Dynamic mapping for the new fields in the `moderation-events` index has been disabled for those that don't have an explicit mapping defined. This change helps avoid indexing conflicts, such as when a field is initially indexed with one data type (e.g., long), but later an entry with a different data type for the same field is indexed, causing the entire entry to fail indexing. After this change, fields with an explicit mapping will be indexed, while other fields will not be indexed or searchable, but will still be displayed in the document. 
### Architecture/Environment changes @@ -33,7 +33,7 @@ This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html * Ensure that the following commands are included in the `post_deployment` command: * `migrate` * `reindex_database` -* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `moderation-events` index after disabling dynamic mapping for the new fields that don't have an explicit mapping defined. Note that the `clear_opensearch.sh.tpl` script has been edited to clear only resources related to the `moderation-events` index, so there will be no need to wait for the `production-locations` index to be refilled, as it will not be deleted. +* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `moderation-events` index after disabling dynamic mapping for the new fields that don't have an explicit mapping defined. 
## Release 1.26.0 From d2eeb881fbe6a4be02ef5958f523e744dda3f67b Mon Sep 17 00:00:00 2001 From: vladsha-dev Date: Wed, 18 Dec 2024 09:41:18 +0100 Subject: [PATCH 10/10] Update release notes --- doc/release/RELEASE-NOTES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release/RELEASE-NOTES.md b/doc/release/RELEASE-NOTES.md index 8715ec77f..ee8d5e8e6 100644 --- a/doc/release/RELEASE-NOTES.md +++ b/doc/release/RELEASE-NOTES.md @@ -33,7 +33,7 @@ This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html * Ensure that the following commands are included in the `post_deployment` command: * `migrate` * `reindex_database` -* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `moderation-events` index after disabling dynamic mapping for the new fields that don't have an explicit mapping defined. +* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `moderation-events` index after disabling dynamic mapping for the new fields that don't have an explicit mapping defined. The `production-locations` index will also be affected, since this will clean all of our custom indexes and templates within the OpenSearch cluster. ## Release 1.26.0 @@ -111,7 +111,7 @@ This issue has been fixed by adding additional requests to delete the appropriat * Ensure that the following commands are included in the `post_deployment` command: * `migrate` * `reindex_database` -* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `production-locations` and `moderation-events` indexes after fixing the process of clearing the custom OpenSearch indexes.
+* Run `[Release] Deploy` pipeline for the target environment with the flag `Clear the custom OpenSearch indexes and templates` set to true - to refresh the index mappings for the `production-locations` and `moderation-events` indexes after fixing the process of clearing the custom OpenSearch indexes. This pipeline run will clean all of our custom indexes and templates within the OpenSearch cluster. ## Release 1.25.0