diff --git a/lambdas/ack_backend/src/audit_table.py b/lambdas/ack_backend/src/audit_table.py
index 8d6a9e5cf..3283fc3c1 100644
--- a/lambdas/ack_backend/src/audit_table.py
+++ b/lambdas/ack_backend/src/audit_table.py
@@ -1,8 +1,8 @@
 """Add the filename to the audit table and check for duplicates."""

 from common.clients import dynamodb_client, logger
+from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
 from common.models.errors import UnhandledAuditTableError
-from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus

 CONDITION_EXPRESSION = "attribute_exists(message_id)"
diff --git a/lambdas/ack_backend/src/constants.py b/lambdas/ack_backend/src/constants.py
index cffd6aeac..9236a5e6f 100644
--- a/lambdas/ack_backend/src/constants.py
+++ b/lambdas/ack_backend/src/constants.py
@@ -1,48 +1,11 @@
 """Constants for ack lambda"""

-import os
-
-AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
-
 COMPLETED_ACK_DIR = "forwardedFile"
 TEMP_ACK_DIR = "TempAck"
 BATCH_FILE_PROCESSING_DIR = "processing"
 BATCH_FILE_ARCHIVE_DIR = "archive"

-def get_source_bucket_name() -> str:
-    """Get the SOURCE_BUCKET_NAME environment from environment variables."""
-    return os.getenv("SOURCE_BUCKET_NAME")
-
-
-def get_ack_bucket_name() -> str:
-    """Get the ACK_BUCKET_NAME environment from environment variables."""
-    return os.getenv("ACK_BUCKET_NAME")
-
-
-class FileStatus:
-    """File status constants"""
-
-    QUEUED = "Queued"
-    PROCESSING = "Processing"
-    PROCESSED = "Processed"
-    DUPLICATE = "Duplicate"
-
-
-class AuditTableKeys:
-    """Audit table keys"""
-
-    FILENAME = "filename"
-    MESSAGE_ID = "message_id"
-    QUEUE_NAME = "queue_name"
-    RECORD_COUNT = "record_count"
-    STATUS = "status"
-    TIMESTAMP = "timestamp"
-    INGESTION_END_TIME = "ingestion_end_time"
-    RECORDS_SUCCEEDED = "records_succeeded"
-    RECORDS_FAILED = "records_failed"
-
-
 ACK_HEADERS = [
     "MESSAGE_HEADER_ID",
     "HEADER_RESPONSE_CODE",
diff --git a/lambdas/ack_backend/src/update_ack_file.py b/lambdas/ack_backend/src/update_ack_file.py
index ec9c55bfb..3ae513214 100644
--- a/lambdas/ack_backend/src/update_ack_file.py
+++ b/lambdas/ack_backend/src/update_ack_file.py
@@ -12,14 +12,13 @@
 )
 from common.aws_s3_utils import move_file
 from common.clients import get_s3_client, logger
+from common.models.batch_constants import ACK_BUCKET_NAME, SOURCE_BUCKET_NAME
 from constants import (
     ACK_HEADERS,
     BATCH_FILE_ARCHIVE_DIR,
     BATCH_FILE_PROCESSING_DIR,
     COMPLETED_ACK_DIR,
     TEMP_ACK_DIR,
-    get_ack_bucket_name,
-    get_source_bucket_name,
 )
 from logging_decorators import complete_batch_file_process_logging_decorator
@@ -71,10 +70,8 @@ def complete_batch_file_process(
     the audit table status"""
     ack_filename = f"{file_key.replace('.csv', f'_BusAck_{created_at_formatted_string}.csv')}"

-    move_file(get_ack_bucket_name(), f"{TEMP_ACK_DIR}/{ack_filename}", f"{COMPLETED_ACK_DIR}/{ack_filename}")
-    move_file(
-        get_source_bucket_name(), f"{BATCH_FILE_PROCESSING_DIR}/{file_key}", f"{BATCH_FILE_ARCHIVE_DIR}/{file_key}"
-    )
+    move_file(ACK_BUCKET_NAME, f"{TEMP_ACK_DIR}/{ack_filename}", f"{COMPLETED_ACK_DIR}/{ack_filename}")
+    move_file(SOURCE_BUCKET_NAME, f"{BATCH_FILE_PROCESSING_DIR}/{file_key}", f"{BATCH_FILE_ARCHIVE_DIR}/{file_key}")

     total_ack_rows_processed, total_failures = get_record_count_and_failures_by_message_id(message_id)
     change_audit_table_status_to_processed(file_key, message_id)
@@ -99,7 +96,7 @@ def obtain_current_ack_content(temp_ack_file_key: str) -> StringIO:
     """Returns the current ack file content if the file exists, or else
     initialises the content with the ack headers."""
     try:
         # If ack file exists in S3 download the contents
-        existing_ack_file = get_s3_client().get_object(Bucket=get_ack_bucket_name(), Key=temp_ack_file_key)
+        existing_ack_file = get_s3_client().get_object(Bucket=ACK_BUCKET_NAME, Key=temp_ack_file_key)
         existing_content = existing_ack_file["Body"].read().decode("utf-8")
     except ClientError as error:
         # If ack file does not exist in S3 create a new file containing the headers only
@@ -132,7 +129,6 @@ def update_ack_file(
         accumulated_csv_content.write(cleaned_row + "\n")

     csv_file_like_object = BytesIO(accumulated_csv_content.getvalue().encode("utf-8"))
-    ack_bucket_name = get_ack_bucket_name()
-    get_s3_client().upload_fileobj(csv_file_like_object, ack_bucket_name, temp_ack_file_key)
-    logger.info("Ack file updated to %s: %s", ack_bucket_name, completed_ack_file_key)
+    get_s3_client().upload_fileobj(csv_file_like_object, ACK_BUCKET_NAME, temp_ack_file_key)
+    logger.info("Ack file updated to %s: %s", ACK_BUCKET_NAME, completed_ack_file_key)
diff --git a/lambdas/ack_backend/tests/test_ack_processor.py b/lambdas/ack_backend/tests/test_ack_processor.py
index 3aabb158e..7f74d8147 100644
--- a/lambdas/ack_backend/tests/test_ack_processor.py
+++ b/lambdas/ack_backend/tests/test_ack_processor.py
@@ -63,6 +63,12 @@ def setUp(self) -> None:
         self.logger_info_patcher = patch("common.log_decorator.logger.info")
         self.mock_logger_info = self.logger_info_patcher.start()

+        self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
+        self.ack_bucket_patcher.start()
+
+        self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", BucketNames.SOURCE)
+        self.source_bucket_patcher.start()
+
     def tearDown(self) -> None:
         GenericTearDown(self.s3_client, self.firehose_client, self.dynamodb_client)
         self.mock_logger_info.stop()
diff --git a/lambdas/ack_backend/tests/test_audit_table.py b/lambdas/ack_backend/tests/test_audit_table.py
index b7d646a51..a218b467b 100644
--- a/lambdas/ack_backend/tests/test_audit_table.py
+++ b/lambdas/ack_backend/tests/test_audit_table.py
@@ -2,8 +2,8 @@
 from unittest.mock import call, patch

 import audit_table
+from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
 from common.models.errors import UnhandledAuditTableError
-from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus


 class TestAuditTable(unittest.TestCase):
diff --git a/lambdas/ack_backend/tests/test_logging_decorators.py b/lambdas/ack_backend/tests/test_logging_decorators.py
index cc97d96f7..e553f923f 100644
--- a/lambdas/ack_backend/tests/test_logging_decorators.py
+++ b/lambdas/ack_backend/tests/test_logging_decorators.py
@@ -2,6 +2,7 @@
 from unittest.mock import patch

 import logging_decorators
+from utils.mock_environment_variables import BucketNames


 class TestLoggingDecorators(unittest.TestCase):
@@ -12,6 +13,9 @@ def setUp(self):
         self.firehose_patcher = patch("common.log_firehose.firehose_client")
         self.mock_firehose = self.firehose_patcher.start()

+        self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", BucketNames.SOURCE)
+        self.source_bucket_patcher.start()
+
     def tearDown(self):
         self.logger_patcher.stop()
         self.firehose_patcher.stop()
diff --git a/lambdas/ack_backend/tests/test_splunk_logging.py b/lambdas/ack_backend/tests/test_splunk_logging.py
index ea3f10403..5e18ea1f1 100644
--- a/lambdas/ack_backend/tests/test_splunk_logging.py
+++ b/lambdas/ack_backend/tests/test_splunk_logging.py
@@ -45,6 +45,9 @@ def setUp(self):
             Body=mock_source_file_with_100_rows.getvalue(),
         )

+        self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
+        self.ack_bucket_patcher.start()
+
     def tearDown(self):
         GenericTearDown(self.s3_client)
diff --git a/lambdas/ack_backend/tests/test_update_ack_file.py b/lambdas/ack_backend/tests/test_update_ack_file.py
index fdbefaf89..6b9cd163f 100644
--- a/lambdas/ack_backend/tests/test_update_ack_file.py
+++ b/lambdas/ack_backend/tests/test_update_ack_file.py
@@ -57,6 +57,9 @@ def setUp(self) -> None:
         self.logger_patcher = patch("update_ack_file.logger")
         self.mock_logger = self.logger_patcher.start()

+        self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
+        self.ack_bucket_patcher.start()
+
     def tearDown(self) -> None:
         GenericTearDown(self.s3_client)
diff --git a/lambdas/ack_backend/tests/test_update_ack_file_flow.py b/lambdas/ack_backend/tests/test_update_ack_file_flow.py
index 35cae061c..9fd78ca4b 100644
--- a/lambdas/ack_backend/tests/test_update_ack_file_flow.py
+++ b/lambdas/ack_backend/tests/test_update_ack_file_flow.py
@@ -5,6 +5,7 @@
 from moto import mock_aws

 import update_ack_file
+from utils.mock_environment_variables import BucketNames


 @mock_aws
@@ -12,16 +13,13 @@ class TestUpdateAckFileFlow(unittest.TestCase):
     def setUp(self):
         self.s3_client = boto3.client("s3", region_name="eu-west-2")

-        self.ack_bucket_name = "my-ack-bucket"
-        self.source_bucket_name = "my-source-bucket"
-        self.ack_bucket_patcher = patch("update_ack_file.get_ack_bucket_name", return_value=self.ack_bucket_name)
-        self.mock_get_ack_bucket_name = self.ack_bucket_patcher.start()
+        self.ack_bucket_name = BucketNames.DESTINATION
+        self.source_bucket_name = BucketNames.SOURCE
+        self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", self.ack_bucket_name)
+        self.ack_bucket_patcher.start()

-        self.source_bucket_patcher = patch(
-            "update_ack_file.get_source_bucket_name",
-            return_value=self.source_bucket_name,
-        )
-        self.mock_get_source_bucket_name = self.source_bucket_patcher.start()
+        self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", self.source_bucket_name)
+        self.source_bucket_patcher.start()

         self.s3_client.create_bucket(
             Bucket=self.ack_bucket_name,
diff --git a/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py b/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
index afb815631..64ce361d3 100644
--- a/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
+++ b/lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py
@@ -1,6 +1,6 @@
 """Generic setup and teardown for ACK backend tests"""

-from constants import AuditTableKeys
+from common.models.batch_constants import AuditTableKeys
 from tests.utils.mock_environment_variables import AUDIT_TABLE_NAME, REGION_NAME, BucketNames, Firehose
diff --git a/lambdas/backend/src/authorisation/authoriser.py b/lambdas/backend/src/authorisation/authoriser.py
index 4e92647b9..2a29ebefb 100644
--- a/lambdas/backend/src/authorisation/authoriser.py
+++ b/lambdas/backend/src/authorisation/authoriser.py
@@ -4,7 +4,7 @@

 from authorisation.api_operation_code import ApiOperationCode
 from common.clients import logger
-from common.models.constants import SUPPLIER_PERMISSIONS_HASH_KEY
+from common.models.constants import RedisHashKeys
 from common.redis_client import get_redis_client


@@ -33,7 +33,7 @@ def _expand_permissions(
         return expanded_permissions

     def _get_supplier_permissions(self, supplier_system: str) -> dict[str, list[ApiOperationCode]]:
-        raw_permissions_data = get_redis_client().hget(SUPPLIER_PERMISSIONS_HASH_KEY, supplier_system)
+        raw_permissions_data = get_redis_client().hget(RedisHashKeys.SUPPLIER_PERMISSIONS_HASH_KEY, supplier_system)
         permissions_data = json.loads(raw_permissions_data) if raw_permissions_data else []
         return self._expand_permissions(permissions_data)
diff --git a/lambdas/backend/src/controller/parameter_parser.py b/lambdas/backend/src/controller/parameter_parser.py
index c202760c5..3c0d6e5a8 100644
--- a/lambdas/backend/src/controller/parameter_parser.py
+++ b/lambdas/backend/src/controller/parameter_parser.py
@@ -2,7 +2,7 @@
 from dataclasses import dataclass
 from typing import Optional

-from common.models.constants import Constants
+from common.models.constants import RedisHashKeys
 from common.models.utils.generic_utils import nhs_number_mod11_check
 from common.redis_client import get_redis_client
 from controller.constants import IdentifierSearchElement, IdentifierSearchParameterName, ImmunizationSearchParameterName
@@ -81,7 +81,7 @@ def process_immunization_target(imms_params: dict[str, list[str]]) -> set[str]:
             f"Search parameter {ImmunizationSearchParameterName.IMMUNIZATION_TARGET} must have one or more values."
         )

-    valid_vaccine_types = get_redis_client().hkeys(Constants.VACCINE_TYPE_TO_DISEASES_HASH_KEY)
+    valid_vaccine_types = get_redis_client().hkeys(RedisHashKeys.VACCINE_TYPE_TO_DISEASES_HASH_KEY)
     if any(x not in valid_vaccine_types for x in vaccine_types):
         raise ParameterExceptionError(
             f"{ImmunizationSearchParameterName.IMMUNIZATION_TARGET} must be one or more of the following: "
diff --git a/lambdas/backend/src/filter.py b/lambdas/backend/src/filter.py
index 0b47f6432..233c1f165 100644
--- a/lambdas/backend/src/filter.py
+++ b/lambdas/backend/src/filter.py
@@ -29,7 +29,7 @@ def create_reference_to_patient_resource(patient_full_url: str, patient: dict) -> dict:
     Returns a reference to the given patient which includes the patient nhs number identifier (system and value
     fields only) and a reference to patient full url. "Type" field is set to "Patient".
     """
-    patient_nhs_number_identifier = [x for x in patient["identifier"] if x.get("system") == Urls.nhs_number][0]
+    patient_nhs_number_identifier = [x for x in patient["identifier"] if x.get("system") == Urls.NHS_NUMBER][0]

     return {
         "reference": patient_full_url,
@@ -67,9 +67,9 @@ def replace_organization_values(imms: dict) -> dict:
             identifier = performer["actor"].get("identifier", {})
             if identifier.get("value") is not None:
                 identifier["value"] = "N2N9I"
-                identifier["system"] = Urls.ods_organization_code
+                identifier["system"] = Urls.ODS_ORGANIZATION_CODE
             if identifier.get("system") is not None:
-                identifier["system"] = Urls.ods_organization_code
+                identifier["system"] = Urls.ODS_ORGANIZATION_CODE

             # Ensure only 'system' and 'value' remain in identifier
             keys = {"system", "value"}
diff --git a/lambdas/backend/tests/test_filter.py b/lambdas/backend/tests/test_filter.py
index 758f5adff..740bf26d7 100644
--- a/lambdas/backend/tests/test_filter.py
+++ b/lambdas/backend/tests/test_filter.py
@@ -109,7 +109,7 @@ def test_replace_organization_values(self):
         # Prepare the input data
         input_imms = load_json_data("completed_covid_immunization_event.json")
         # Change the input data's organization_identifier_system to be something other than the ods url
-        input_imms["performer"][1]["actor"]["identifier"]["system"] = Urls.urn_school_number
+        input_imms["performer"][1]["actor"]["identifier"]["system"] = Urls.URN_SCHOOL_NUMBER
         # Add organization_display to the input data (note that whilst this field is not one of the expected fields,
         # the validator does not prevent it from being included on a create or update, so the possiblity of it
         # existing must be handled)
diff --git a/lambdas/batch_processor_filter/src/batch_audit_repository.py b/lambdas/batch_processor_filter/src/batch_audit_repository.py
index 3ae054fab..134d7a343 100644
--- a/lambdas/batch_processor_filter/src/batch_audit_repository.py
+++ b/lambdas/batch_processor_filter/src/batch_audit_repository.py
@@ -1,12 +1,10 @@
 from boto3.dynamodb.conditions import Key

 from common.aws_dynamodb import get_dynamodb_table
+from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
 from constants import (
     AUDIT_TABLE_FILENAME_GSI,
-    AUDIT_TABLE_NAME,
     AUDIT_TABLE_QUEUE_NAME_GSI,
-    AuditTableKeys,
-    FileStatus,
 )
diff --git a/lambdas/batch_processor_filter/src/batch_file_repository.py b/lambdas/batch_processor_filter/src/batch_file_repository.py
index 58a756cba..4dc137ce5 100644
--- a/lambdas/batch_processor_filter/src/batch_file_repository.py
+++ b/lambdas/batch_processor_filter/src/batch_file_repository.py
@@ -5,7 +5,7 @@

 from batch_file_created_event import BatchFileCreatedEvent
 from common.clients import get_s3_client
-from constants import ACK_BUCKET_NAME, SOURCE_BUCKET_NAME
+from common.models.batch_constants import ACK_BUCKET_NAME, SOURCE_BUCKET_NAME


 class BatchFileRepository:
diff --git a/lambdas/batch_processor_filter/src/batch_processor_filter_service.py b/lambdas/batch_processor_filter/src/batch_processor_filter_service.py
index 73531dabc..b970cad60 100644
--- a/lambdas/batch_processor_filter/src/batch_processor_filter_service.py
+++ b/lambdas/batch_processor_filter/src/batch_processor_filter_service.py
@@ -7,7 +7,8 @@
 from batch_file_repository import BatchFileRepository
 from common.clients import get_sqs_client, logger
 from common.log_firehose import send_log_to_firehose
-from constants import QUEUE_URL, SPLUNK_FIREHOSE_STREAM_NAME, FileNotProcessedReason, FileStatus
+from common.models.batch_constants import FileNotProcessedReason, FileStatus
+from constants import QUEUE_URL, SPLUNK_FIREHOSE_STREAM_NAME
 from exceptions import EventAlreadyProcessingForSupplierAndVaccTypeError

 BATCH_AUDIT_REPOSITORY = BatchAuditRepository()
diff --git a/lambdas/batch_processor_filter/src/constants.py b/lambdas/batch_processor_filter/src/constants.py
index 8ad9b8420..df9070294 100644
--- a/lambdas/batch_processor_filter/src/constants.py
+++ b/lambdas/batch_processor_filter/src/constants.py
@@ -1,38 +1,6 @@
 import os
-from enum import StrEnum

-AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
 AUDIT_TABLE_FILENAME_GSI = os.getenv("FILE_NAME_GSI")
 AUDIT_TABLE_QUEUE_NAME_GSI = os.getenv("QUEUE_NAME_GSI")
 QUEUE_URL = os.getenv("QUEUE_URL")
 SPLUNK_FIREHOSE_STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME")
-SOURCE_BUCKET_NAME = os.getenv("SOURCE_BUCKET_NAME")
-ACK_BUCKET_NAME = os.getenv("ACK_BUCKET_NAME")
-
-
-class FileStatus(StrEnum):
-    """File status constants"""
-
-    QUEUED = "Queued"
-    PROCESSING = "Processing"
-    PREPROCESSED = "Preprocessed"
-    PROCESSED = "Processed"
-    NOT_PROCESSED = "Not processed"
-    FAILED = "Failed"
-
-
-class FileNotProcessedReason(StrEnum):
-    """Reasons why a file was not processed"""
-
-    DUPLICATE = "Duplicate"
-
-
-class AuditTableKeys(StrEnum):
-    """Audit table keys"""
-
-    FILENAME = "filename"
-    MESSAGE_ID = "message_id"
-    QUEUE_NAME = "queue_name"
-    STATUS = "status"
-    TIMESTAMP = "timestamp"
-    ERROR_DETAILS = "error_details"
diff --git a/lambdas/batch_processor_filter/tests/test_lambda_handler.py b/lambdas/batch_processor_filter/tests/test_lambda_handler.py
index 3ef9522cb..ecf7144e8 100644
--- a/lambdas/batch_processor_filter/tests/test_lambda_handler.py
+++ b/lambdas/batch_processor_filter/tests/test_lambda_handler.py
@@ -22,13 +22,11 @@
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
     from common.clients import REGION_NAME
+    from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
     from constants import (
         AUDIT_TABLE_FILENAME_GSI,
-        AUDIT_TABLE_NAME,
         AUDIT_TABLE_QUEUE_NAME_GSI,
         SPLUNK_FIREHOSE_STREAM_NAME,
-        AuditTableKeys,
-        FileStatus,
     )
     from lambda_handler import lambda_handler
diff --git a/lambdas/filenameprocessor/src/audit_table.py b/lambdas/filenameprocessor/src/audit_table.py
index aeef9af51..a7bcb9d68 100644
--- a/lambdas/filenameprocessor/src/audit_table.py
+++ b/lambdas/filenameprocessor/src/audit_table.py
@@ -3,8 +3,8 @@
 from typing import Optional

 from common.clients import dynamodb_client, logger
+from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys
 from common.models.errors import UnhandledAuditTableError
-from constants import AUDIT_TABLE_NAME, AuditTableKeys


 def upsert_audit_table(
diff --git a/lambdas/filenameprocessor/src/constants.py b/lambdas/filenameprocessor/src/constants.py
index 67d013ad3..dbe2be774 100644
--- a/lambdas/filenameprocessor/src/constants.py
+++ b/lambdas/filenameprocessor/src/constants.py
@@ -1,7 +1,6 @@
 """Constants for the filenameprocessor lambda"""

 import os
-from enum import StrEnum

 from common.models.errors import UnhandledAuditTableError
 from models.errors import (
@@ -10,17 +9,12 @@
     VaccineTypePermissionsError,
 )

-SOURCE_BUCKET_NAME = os.getenv("SOURCE_BUCKET_NAME")
-
-
 DPS_DESTINATION_BUCKET_NAME = os.getenv("DPS_BUCKET_NAME")
 EXPECTED_SOURCE_BUCKET_ACCOUNT = os.getenv("ACCOUNT_ID")
 EXPECTED_DPS_DESTINATION_ACCOUNT = os.getenv("DPS_ACCOUNT_ID")
-AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
 AUDIT_TABLE_TTL_DAYS = os.getenv("AUDIT_TABLE_TTL_DAYS")

 VALID_VERSIONS = ["V5"]
-VACCINE_TYPE_TO_DISEASES_HASH_KEY = "vacc_to_diseases"
 ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY = "ods_code_to_supplier"

 EXTENDED_ATTRIBUTES_FILE_PREFIX = "Vaccination_Extended_Attributes"
@@ -41,38 +35,3 @@
 # Filename timestamp constants
 VALID_TIMESTAMP_LENGTH = 17
 VALID_TIMEZONE_OFFSETS = {"00", "01"}
-
-
-class FileStatus(StrEnum):
-    """File status constants"""
-
-    QUEUED = "Queued"
-    PROCESSING = "Processing"
-    PROCESSED = "Processed"
-    NOT_PROCESSED = "Not processed"
-    FAILED = "Failed"
-
-
-class FileNotProcessedReason(StrEnum):
-    """Reasons why a file was not processed"""
-
-    EMPTY = "Empty file"
-    UNAUTHORISED = "Unauthorised"
-
-
-class AuditTableKeys(StrEnum):
-    """Audit table keys"""
-
-    FILENAME = "filename"
-    MESSAGE_ID = "message_id"
-    QUEUE_NAME = "queue_name"
-    STATUS = "status"
-    TIMESTAMP = "timestamp"
-    EXPIRES_AT = "expires_at"
-    ERROR_DETAILS = "error_details"
-
-
-class Operation(str):
-    CREATE = "C"
-    UPDATE = "U"
-    DELETE = "D"
diff --git a/lambdas/filenameprocessor/src/elasticache.py b/lambdas/filenameprocessor/src/elasticache.py
index 6dd14e357..4166c03ec 100644
--- a/lambdas/filenameprocessor/src/elasticache.py
+++ b/lambdas/filenameprocessor/src/elasticache.py
@@ -1,21 +1,18 @@
 import json

-from common.models.constants import SUPPLIER_PERMISSIONS_HASH_KEY
+from common.models.constants import RedisHashKeys
 from common.redis_client import get_redis_client
-from constants import (
-    ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY,
-    VACCINE_TYPE_TO_DISEASES_HASH_KEY,
-)
+from constants import ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY


 def get_supplier_permissions_from_cache(supplier_system: str) -> list[str]:
     """Gets and returns the permissions config file content from ElastiCache (Redis)."""
-    permissions_str = get_redis_client().hget(SUPPLIER_PERMISSIONS_HASH_KEY, supplier_system)
+    permissions_str = get_redis_client().hget(RedisHashKeys.SUPPLIER_PERMISSIONS_HASH_KEY, supplier_system)
     return json.loads(permissions_str) if permissions_str else []


 def get_valid_vaccine_types_from_cache() -> list[str]:
-    return get_redis_client().hkeys(VACCINE_TYPE_TO_DISEASES_HASH_KEY)
+    return get_redis_client().hkeys(RedisHashKeys.VACCINE_TYPE_TO_DISEASES_HASH_KEY)


 def get_supplier_system_from_cache(ods_code: str) -> str:
diff --git a/lambdas/filenameprocessor/src/file_name_processor.py b/lambdas/filenameprocessor/src/file_name_processor.py
index 13ffc8b98..26602543d 100644
--- a/lambdas/filenameprocessor/src/file_name_processor.py
+++ b/lambdas/filenameprocessor/src/file_name_processor.py
@@ -15,6 +15,7 @@
 )
 from common.clients import STREAM_NAME, get_s3_client, logger
 from common.log_decorator import logging_decorator
+from common.models.batch_constants import SOURCE_BUCKET_NAME, FileNotProcessedReason, FileStatus
 from common.models.errors import UnhandledAuditTableError
 from constants import (
     DPS_DESTINATION_BUCKET_NAME,
@@ -24,9 +25,6 @@
     EXPECTED_SOURCE_BUCKET_ACCOUNT,
     EXTENDED_ATTRIBUTES_ARCHIVE_PREFIX,
     EXTENDED_ATTRIBUTES_FILE_PREFIX,
-    SOURCE_BUCKET_NAME,
-    FileNotProcessedReason,
-    FileStatus,
 )
 from file_validation import is_file_in_directory_root, validate_batch_file_key, validate_extended_attributes_file_key
 from make_and_upload_ack_file import make_and_upload_the_ack_file
diff --git a/lambdas/filenameprocessor/src/supplier_permissions.py b/lambdas/filenameprocessor/src/supplier_permissions.py
index 48f1f91fc..617305ef8 100644
--- a/lambdas/filenameprocessor/src/supplier_permissions.py
+++ b/lambdas/filenameprocessor/src/supplier_permissions.py
@@ -1,7 +1,7 @@
 """Functions for fetching supplier permissions"""

 from common.clients import logger
-from constants import Operation
+from common.models.batch_constants import OperationShortCode
 from elasticache import get_supplier_permissions_from_cache, get_supplier_system_from_cache
 from models.errors import VaccineTypePermissionsError

@@ -28,9 +28,9 @@ def validate_permissions_for_extended_attributes_files(vaccine_type: str, ods_co
     Raises an exception if the supplier does not have at least one permission for the vaccine type.
     """
     allowed_operations = {
-        Operation.CREATE,
-        Operation.UPDATE,
-        Operation.DELETE,
+        OperationShortCode.CREATE,
+        OperationShortCode.UPDATE,
+        OperationShortCode.DELETE,
     }
     supplier = get_supplier_system_from_cache(ods_code)
     supplier_permissions = get_supplier_permissions_from_cache(supplier)
diff --git a/lambdas/filenameprocessor/tests/test_audit_table.py b/lambdas/filenameprocessor/tests/test_audit_table.py
index 093eef716..87c4d4f86 100644
--- a/lambdas/filenameprocessor/tests/test_audit_table.py
+++ b/lambdas/filenameprocessor/tests/test_audit_table.py
@@ -18,8 +18,8 @@
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
     from audit_table import upsert_audit_table
     from common.clients import REGION_NAME
+    from common.models.batch_constants import AUDIT_TABLE_NAME, FileStatus
     from common.models.errors import UnhandledAuditTableError
-    from constants import AUDIT_TABLE_NAME, FileStatus

 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
diff --git a/lambdas/filenameprocessor/tests/test_lambda_handler.py b/lambdas/filenameprocessor/tests/test_lambda_handler.py
index 596cf65be..9ee44bd79 100644
--- a/lambdas/filenameprocessor/tests/test_lambda_handler.py
+++ b/lambdas/filenameprocessor/tests/test_lambda_handler.py
@@ -34,12 +34,8 @@
 # Ensure environment variables are mocked before importing from src files
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
     from common.clients import REGION_NAME
-    from constants import (
-        AUDIT_TABLE_NAME,
-        EXTENDED_ATTRIBUTES_VACC_TYPE,
-        AuditTableKeys,
-        FileStatus,
-    )
+    from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
+    from constants import EXTENDED_ATTRIBUTES_VACC_TYPE
     from file_name_processor import handle_record, lambda_handler

 s3_client = boto3_client("s3", region_name=REGION_NAME)
diff --git a/lambdas/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py b/lambdas/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py
index 14e8927da..43af8b35b 100644
--- a/lambdas/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py
+++ b/lambdas/filenameprocessor/tests/utils_for_tests/utils_for_filenameprocessor_tests.py
@@ -18,13 +18,9 @@
     from csv import DictReader

     from common.clients import REGION_NAME
-    from common.models.constants import SUPPLIER_PERMISSIONS_HASH_KEY
-    from constants import (
-        AUDIT_TABLE_NAME,
-        ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY,
-        AuditTableKeys,
-        FileStatus,
-    )
+    from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
+    from common.models.constants import RedisHashKeys
+    from constants import ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY

 MOCK_ODS_CODE_TO_SUPPLIER = {"YGM41": "EMIS", "X8E5B": "RAVS"}
@@ -63,7 +59,7 @@ def create_mock_hget(
     def mock_hget(key, field):
         if key == ODS_CODE_TO_SUPPLIER_SYSTEM_HASH_KEY:
             return mock_ods_code_to_supplier.get(field)
-        if key == SUPPLIER_PERMISSIONS_HASH_KEY:
+        if key == RedisHashKeys.SUPPLIER_PERMISSIONS_HASH_KEY:
             return mock_supplier_permissions.get(field)
         return None
diff --git a/lambdas/filenameprocessor/tests/utils_for_tests/values_for_tests.py b/lambdas/filenameprocessor/tests/utils_for_tests/values_for_tests.py
index e15d41cd3..481dd2d20 100644
--- a/lambdas/filenameprocessor/tests/utils_for_tests/values_for_tests.py
+++ b/lambdas/filenameprocessor/tests/utils_for_tests/values_for_tests.py
@@ -6,7 +6,7 @@
 from utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT

 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import AuditTableKeys
+    from common.models.batch_constants import AuditTableKeys

 fixed_datetime = datetime(2024, 10, 29, 12, 0, 0)
diff --git a/lambdas/recordprocessor/src/audit_table.py b/lambdas/recordprocessor/src/audit_table.py
index 00a1f4cff..a6e7c59b7 100644
--- a/lambdas/recordprocessor/src/audit_table.py
+++ b/lambdas/recordprocessor/src/audit_table.py
@@ -4,8 +4,8 @@
 from typing import Optional

 from common.clients import dynamodb_client, logger
+from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys
 from common.models.errors import UnhandledAuditTableError
-from constants import AUDIT_TABLE_NAME, AuditTableKeys


 def update_audit_table_status(
diff --git a/lambdas/recordprocessor/src/batch_processor.py b/lambdas/recordprocessor/src/batch_processor.py
index ac9aa60cd..659e05a6f 100644
--- a/lambdas/recordprocessor/src/batch_processor.py
+++ b/lambdas/recordprocessor/src/batch_processor.py
@@ -11,13 +11,8 @@
 from common.aws_s3_utils import move_file
 from common.batch.eof_utils import make_batch_eof_message
 from common.clients import logger
-from constants import (
-    ARCHIVE_DIR_NAME,
-    PROCESSING_DIR_NAME,
-    SOURCE_BUCKET_NAME,
-    FileNotProcessedReason,
-    FileStatus,
-)
+from common.models.batch_constants import SOURCE_BUCKET_NAME, FileNotProcessedReason, FileStatus
+from constants import ARCHIVE_DIR_NAME, PROCESSING_DIR_NAME
 from file_level_validation import file_is_empty, file_level_validation
 from mappings import map_target_disease
 from process_row import process_row
diff --git a/lambdas/recordprocessor/src/constants.py b/lambdas/recordprocessor/src/constants.py
index d3a28aa88..6b9e53ea6 100644
--- a/lambdas/recordprocessor/src/constants.py
+++ b/lambdas/recordprocessor/src/constants.py
@@ -1,14 +1,5 @@
 """Constants for recordprocessor"""

-import os
-from enum import StrEnum
-
-# Once Python projects are moved to /lambdas consider consolidating constants common to batch in
-# /shared/src/common/constants/batch_constants.py (VED-881)
-SOURCE_BUCKET_NAME = os.getenv("SOURCE_BUCKET_NAME")
-ACK_BUCKET_NAME = os.getenv("ACK_BUCKET_NAME")
-AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
-
 ARCHIVE_DIR_NAME = "archive"
 PROCESSING_DIR_NAME = "processing"
@@ -55,37 +46,6 @@
 EMIS_V5_SUPPLIER_IDENTIFIER_SYSTEM = "https://emishealth.com/identifiers/vacc"


-class FileStatus:
-    """File status constants"""
-
-    QUEUED = "Queued"
-    PROCESSING = "Processing"
-    PREPROCESSED = "Preprocessed"  # All entries in file converted to FHIR and forwarded to Kinesis
-    PROCESSED = "Processed"  # All entries processed and ack file created
-    NOT_PROCESSED = "Not processed"
-    FAILED = "Failed"
-
-
-class FileNotProcessedReason(StrEnum):
-    """Reasons why a file was not processed"""
-
-    UNAUTHORISED = "Unauthorised"
-    EMPTY = "Empty file"
-
-
-class AuditTableKeys:
-    """Audit table keys"""
-
-    FILENAME = "filename"
-    MESSAGE_ID = "message_id"
-    QUEUE_NAME = "queue_name"
-    RECORD_COUNT = "record_count"
-    STATUS = "status"
-    TIMESTAMP = "timestamp"
-    INGESTION_START_TIME = "ingestion_start_time"
-    ERROR_DETAILS = "error_details"
-
-
 class Diagnostics:
     """Diagnostics messages"""
@@ -95,31 +55,3 @@
     UNABLE_TO_OBTAIN_IMMS_ID = "Unable to obtain imms event id"
     UNABLE_TO_OBTAIN_VERSION = "Unable to obtain current imms event version"
     INVALID_CONVERSION = "Unable to convert row to FHIR Immunization Resource JSON format"
-
-
-class Urls:
-    """Urls"""
-
-    SNOMED = "http://snomed.info/sct"  # NOSONAR(S5332)
-    NHS_NUMBER = "https://fhir.nhs.uk/Id/nhs-number"
-    NULL_FLAVOUR_CODES = "http://terminology.hl7.org/CodeSystem/v3-NullFlavor"  # NOSONAR(S5332)
-    VACCINATION_PROCEDURE = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure"
-
-
-class Operation(StrEnum):
-    CREATE = "CREATE"
-    UPDATE = "UPDATE"
-    DELETE = "DELETE"
-
-
-class Permission(StrEnum):
-    CREATE = "C"
-    UPDATE = "U"
-    DELETE = "D"
-
-
-permission_to_operation_map = {
-    Permission.CREATE: Operation.CREATE,
-    Permission.UPDATE: Operation.UPDATE,
-    Permission.DELETE: Operation.DELETE,
-}
diff --git a/lambdas/recordprocessor/src/convert_to_fhir_imms_resource.py b/lambdas/recordprocessor/src/convert_to_fhir_imms_resource.py
index 0731c3562..34cc361b0 100644
--- a/lambdas/recordprocessor/src/convert_to_fhir_imms_resource.py
+++ b/lambdas/recordprocessor/src/convert_to_fhir_imms_resource.py
@@ -2,7 +2,8 @@

 from typing import Callable, Dict, List

-from constants import Operation, Urls
+from common.models.batch_constants import Operation
+from common.models.constants import Urls
 from utils_for_fhir_conversion import Add, Convert, Generate, _is_not_empty

 ImmunizationDecorator = Callable[[Dict, Dict[str, str]], None]
diff --git a/lambdas/recordprocessor/src/file_level_validation.py b/lambdas/recordprocessor/src/file_level_validation.py
index 58e1d89ac..54066e111 100644
--- a/lambdas/recordprocessor/src/file_level_validation.py
+++ b/lambdas/recordprocessor/src/file_level_validation.py
@@ -9,16 +9,14 @@
 from audit_table import set_audit_table_ingestion_start_time, update_audit_table_status
 from common.aws_s3_utils import move_file
 from common.clients import logger
-from constants import (
-    ARCHIVE_DIR_NAME,
-    EXPECTED_CSV_HEADERS,
-    PROCESSING_DIR_NAME,
+from common.models.batch_constants import (
     SOURCE_BUCKET_NAME,
     FileNotProcessedReason,
     FileStatus,
-    Permission,
+    OperationShortCode,
     permission_to_operation_map,
 )
+from constants import ARCHIVE_DIR_NAME, EXPECTED_CSV_HEADERS, PROCESSING_DIR_NAME
 from logging_decorator import file_level_validation_logging_decorator
 from make_and_upload_ack_file import make_and_upload_ack_file
 from models.errors import InvalidHeaders, NoOperationPermissions
@@ -46,10 +44,10 @@ def get_permitted_operations(supplier: str, vaccine_type: str, allowed_permissio

     # Extract permissions letters to get map key from the allowed vaccine type
     permissions_for_vaccine_type = {
-        Permission(permission)
+        OperationShortCode(permission)
         for permission_str in permission_strs_for_vaccine_type
         for permission in permission_str.split(".")[1].upper()
-        if permission in list(Permission)
+        if permission in list(OperationShortCode)
     }

     # Map Permission key to action flag
diff --git a/lambdas/recordprocessor/src/make_and_upload_ack_file.py b/lambdas/recordprocessor/src/make_and_upload_ack_file.py
index 95c549f24..48706d998 100644
--- a/lambdas/recordprocessor/src/make_and_upload_ack_file.py
+++ b/lambdas/recordprocessor/src/make_and_upload_ack_file.py
@@ -4,7 +4,7 @@
 from io import BytesIO, StringIO

 from common.clients import get_s3_client
-from constants import ACK_BUCKET_NAME
+from common.models.batch_constants import ACK_BUCKET_NAME


 def make_ack_data(
diff --git a/lambdas/recordprocessor/src/mappings.py b/lambdas/recordprocessor/src/mappings.py
index 2420fd122..c20266288 100644
--- a/lambdas/recordprocessor/src/mappings.py
+++ b/lambdas/recordprocessor/src/mappings.py
@@ -2,13 +2,13 @@

 import json

+from common.models.constants import RedisHashKeys, Urls
 from common.redis_client import get_redis_client
-from constants import Urls


 def map_target_disease(vaccine: str) -> list:
     """Returns the target disease element for the given vaccine type using the vaccine_disease_mapping"""
-    diseases_str = get_redis_client().hget("vacc_to_diseases", vaccine)
+    diseases_str = get_redis_client().hget(RedisHashKeys.VACCINE_TYPE_TO_DISEASES_HASH_KEY, vaccine)
     diseases = json.loads(diseases_str) if diseases_str else []
     return [
         {
diff --git a/lambdas/recordprocessor/src/utils_for_fhir_conversion.py b/lambdas/recordprocessor/src/utils_for_fhir_conversion.py
index 8e91ed63a..395b73f4b 100644
--- a/lambdas/recordprocessor/src/utils_for_fhir_conversion.py
+++ b/lambdas/recordprocessor/src/utils_for_fhir_conversion.py
@@ -4,7 +4,7 @@
 from datetime import datetime
 from decimal import Decimal, InvalidOperation

-from constants import Urls
+from common.models.constants import Urls


 def _is_not_empty(value: any) -> bool:
diff --git a/lambdas/recordprocessor/tests/test_audit_table.py b/lambdas/recordprocessor/tests/test_audit_table.py
index 29358df96..2e63f71fe 100644
--- a/lambdas/recordprocessor/tests/test_audit_table.py
+++ b/lambdas/recordprocessor/tests/test_audit_table.py
@@ -28,11 +28,7 @@
         update_audit_table_status,
     )
     from common.clients import REGION_NAME
-    from constants import (
-        AUDIT_TABLE_NAME,
-        FileStatus,
-    )
-
+    from common.models.batch_constants import AUDIT_TABLE_NAME, FileStatus

 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
diff --git a/lambdas/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py b/lambdas/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py
index 4a8bf0e97..c9f161714 100644
--- a/lambdas/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py
+++ b/lambdas/recordprocessor/tests/test_convert_to_fhir_imms_decorators.py
@@ -24,7 +24,7 @@
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import Urls
+    from common.models.constants import Urls
     from convert_to_fhir_imms_resource import (
         _decorate_immunization,
         _decorate_patient,
diff --git a/lambdas/recordprocessor/tests/test_process_csv_to_fhir.py b/lambdas/recordprocessor/tests/test_process_csv_to_fhir.py
index 6919bdbc2..b1fc8ea2c 100644
--- a/lambdas/recordprocessor/tests/test_process_csv_to_fhir.py
+++ b/lambdas/recordprocessor/tests/test_process_csv_to_fhir.py
@@ -25,7 +25,7 @@

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
     from batch_processor import process_csv_to_fhir
-    from constants import AUDIT_TABLE_NAME, FileStatus
+    from common.models.batch_constants import AUDIT_TABLE_NAME, FileStatus

 dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME)
 s3_client = boto3.client("s3", region_name=REGION_NAME)
diff --git a/lambdas/recordprocessor/tests/test_recordprocessor_main.py b/lambdas/recordprocessor/tests/test_recordprocessor_main.py
index 9e935ea3e..ef1e61d7b 100644
--- a/lambdas/recordprocessor/tests/test_recordprocessor_main.py
+++ b/lambdas/recordprocessor/tests/test_recordprocessor_main.py
@@ -36,13 +36,8 @@

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
     from batch_processor import main
-    from constants import (
-        AUDIT_TABLE_NAME,
-        AuditTableKeys,
-        Diagnostics,
-        FileNotProcessedReason,
-        FileStatus,
-    )
+    from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileNotProcessedReason, FileStatus
+    from constants import Diagnostics

 s3_client = boto3_client("s3", region_name=REGION_NAME)
 kinesis_client = boto3_client("kinesis", region_name=REGION_NAME)
diff --git a/lambdas/recordprocessor/tests/test_utils_for_fhir_conversion.py b/lambdas/recordprocessor/tests/test_utils_for_fhir_conversion.py
index 7b77b2ad5..5c46f5828 100644
--- a/lambdas/recordprocessor/tests/test_utils_for_fhir_conversion.py
+++ b/lambdas/recordprocessor/tests/test_utils_for_fhir_conversion.py
@@ -9,7 +9,7 @@
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import Urls
+    from common.models.constants import Urls
     from utils_for_fhir_conversion import Add, Convert, Generate, _is_not_empty
diff --git a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
index eb391411c..96716fe31 100644
--- a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
+++ b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/decorator_constants.py
@@ -11,7 +11,7 @@
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import Urls
+    from common.models.constants import Urls


 VALID_NHS_NUMBER = "1345678940"
diff --git a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
index e1279ae8e..e52d587ab 100644
--- a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
+++ b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py
@@ -23,10 +23,7 @@
     from csv import DictReader

     from common.clients import REGION_NAME
-    from constants import (
-        AUDIT_TABLE_NAME,
-        AuditTableKeys,
-    )
+    from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys

 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
diff --git a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
index 8038bcc36..287e69c20 100644
--- a/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
+++ b/lambdas/recordprocessor/tests/utils_for_recordprocessor_tests/values_for_recordprocessor_tests.py
@@ -9,7 +9,8 @@
 )

 with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-    from constants import AuditTableKeys, Urls
+    from common.models.batch_constants import AuditTableKeys
+    from common.models.constants import Urls


 REGION_NAME = "eu-west-2"
diff --git a/lambdas/shared/src/common/models/batch_constants.py b/lambdas/shared/src/common/models/batch_constants.py
new file mode 100644
index 000000000..5634a74f7
--- /dev/null
+++ b/lambdas/shared/src/common/models/batch_constants.py
@@ -0,0 +1,61 @@
+import os
+from enum import StrEnum
+
+ACK_BUCKET_NAME = os.getenv("ACK_BUCKET_NAME")
+AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
+SOURCE_BUCKET_NAME = os.getenv("SOURCE_BUCKET_NAME")
+
+
+class FileStatus(StrEnum):
+    """File status constants"""
+
+    QUEUED = "Queued"
+    PROCESSING = "Processing"
+    PREPROCESSED = "Preprocessed"
+    PROCESSED = "Processed"
+    NOT_PROCESSED = "Not processed"
+    FAILED = "Failed"
+
+
+class FileNotProcessedReason(StrEnum):
+    """Reasons why a file was not processed"""
+
+    DUPLICATE = "Duplicate"
+    EMPTY = "Empty file"
+    UNAUTHORISED = "Unauthorised"
+
+
+class AuditTableKeys(StrEnum):
+    """Audit table keys"""
+
+    FILENAME = "filename"
+    MESSAGE_ID = "message_id"
+    QUEUE_NAME = "queue_name"
+    RECORD_COUNT = "record_count"
+    STATUS = "status"
+    TIMESTAMP = "timestamp"
+    EXPIRES_AT = "expires_at"
+    INGESTION_START_TIME = "ingestion_start_time"
+    INGESTION_END_TIME = "ingestion_end_time"
+    RECORDS_SUCCEEDED = "records_succeeded"
+    RECORDS_FAILED = "records_failed"
+    ERROR_DETAILS = "error_details"
+
+
+class Operation(StrEnum):
+    CREATE = "CREATE"
+    UPDATE = "UPDATE"
+    DELETE = "DELETE"
+
+
+class OperationShortCode(StrEnum):
+    CREATE = "C"
+    UPDATE = "U"
+    DELETE = "D"
+
+
+permission_to_operation_map = {
+    OperationShortCode.CREATE: Operation.CREATE,
+    OperationShortCode.UPDATE: Operation.UPDATE,
+    OperationShortCode.DELETE: Operation.DELETE,
+}
diff --git a/lambdas/shared/src/common/models/constants.py b/lambdas/shared/src/common/models/constants.py
index 7bbe99c5e..73d6fe926 100644
--- a/lambdas/shared/src/common/models/constants.py
+++ b/lambdas/shared/src/common/models/constants.py
@@ -53,10 +53,6 @@ class Constants:
     # https://digital.nhs.uk/developer/api-catalogue/personal-demographics-service-fhir#post-/Patient
     PERSON_NAME_ELEMENT_MAX_LENGTH = 35

-    SUPPLIER_PERMISSIONS_KEY = "supplier_permissions"
-    VACCINE_TYPE_TO_DISEASES_HASH_KEY = "vacc_to_diseases"
-    DISEASES_TO_VACCINE_TYPE_HASH_KEY = "diseases_to_vacc"
-
     COMPLETED_STATUS = "completed"
     REINSTATED_RECORD_STATUS = "reinstated"
@@ -64,17 +60,23 @@ class Constants:
 class Urls:
     """Urls which are expected to be used within the FHIR Immunization Resource json data"""

-    nhs_number = "https://fhir.nhs.uk/Id/nhs-number"
-    vaccination_procedure = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure"
-    snomed = "http://snomed.info/sct"  # NOSONAR(S5332)
-    nhs_number_verification_status_structure_definition = (
+    NHS_NUMBER = "https://fhir.nhs.uk/Id/nhs-number"
+    VACCINATION_PROCEDURE = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure"
+    SNOMED = "http://snomed.info/sct"  # NOSONAR(S5332)
+    NHS_NUMBER_VERIFICATION_STATUS_STRUCTURE_DEFINITION = (
         "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-NHSNumberVerificationStatus"
     )
-    nhs_number_verification_status_code_system = (
+    NHS_NUMBER_VERIFICATION_STATUS_CODE_SYSTEM = (
         "https://fhir.hl7.org.uk/CodeSystem/UKCore-NHSNumberVerificationStatusEngland"
     )
-    ods_organization_code = "https://fhir.nhs.uk/Id/ods-organization-code"
-    urn_school_number = "https://fhir.hl7.org.uk/Id/urn-school-number"
+    ODS_ORGANIZATION_CODE = "https://fhir.nhs.uk/Id/ods-organization-code"
+    URN_SCHOOL_NUMBER = "https://fhir.hl7.org.uk/Id/urn-school-number"
+    NULL_FLAVOUR_CODES = "http://terminology.hl7.org/CodeSystem/v3-NullFlavor"  # NOSONAR(S5332)


-SUPPLIER_PERMISSIONS_HASH_KEY = "supplier_permissions"
+class RedisHashKeys:
+    """Redis hash keys"""
+
+    DISEASES_TO_VACCINE_TYPE_HASH_KEY = "diseases_to_vacc"
+    SUPPLIER_PERMISSIONS_HASH_KEY = "supplier_permissions"
+    VACCINE_TYPE_TO_DISEASES_HASH_KEY = "vacc_to_diseases"
diff --git a/lambdas/shared/src/common/models/fhir_immunization_pre_validators.py b/lambdas/shared/src/common/models/fhir_immunization_pre_validators.py
index 3da10e0b7..338263eca 100644
--- a/lambdas/shared/src/common/models/fhir_immunization_pre_validators.py
+++ b/lambdas/shared/src/common/models/fhir_immunization_pre_validators.py
@@ -591,7 +591,7 @@ def pre_validate_vaccination_procedure_code(self, values: dict) -> None:
         (legacy CSV field name: VACCINATION_PROCEDURE_CODE) exists, then it is a non-empty string
         """
         url = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-" + "VaccinationProcedure"
-        system = Urls.snomed
+        system = Urls.SNOMED
         field_type = "code"
         field_location = generate_field_location_for_extension(url, system, field_type)
         try:
@@ -608,7 +608,7 @@ def pre_validate_vaccination_procedure_display(self, values: dict) -> None:
         (legacy CSV field name: VACCINATION_PROCEDURE_TERM) exists, then it is a non-empty string
         """
         url = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-" + "VaccinationProcedure"
-        system = Urls.snomed
+        system = Urls.SNOMED
         field_type = "display"
         field_location = generate_field_location_for_extension(url, system, field_type)
         try:
@@ -624,7 +624,7 @@ def pre_validate_vaccination_situation_code(self, values: dict) -> None:
         (legacy CSV field name: VACCINATION_SITUATION_CODE) exists, then it is a non-empty string
         """
         url = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationSituation"
-        system = Urls.snomed
+        system = Urls.SNOMED
         field_type = "code"
         field_location = generate_field_location_for_extension(url, system, field_type)
         try:
@@ -640,7 +640,7 @@ def pre_validate_vaccination_situation_display(self, values: dict) -> None:
         (legacy CSV field name: VACCINATION_SITUATION_TERM) exists, then it is a non-empty string
         """
         url = "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationSituation"
-        system = Urls.snomed
+        system = Urls.SNOMED
         field_type = "display"
         field_location = generate_field_location_for_extension(url, system, field_type)
         try:
@@ -705,9 +705,9 @@ def pre_validate_target_disease_codings(self, values: dict) -> None:
             field_location = f"protocolApplied[0].targetDisease[{i}].coding"
             try:
                 coding = values["protocolApplied"][0]["targetDisease"][i]["coding"]
-                if sum(1 for x in coding if x.get("system") == Urls.snomed) != 1:
+                if sum(1 for x in coding if x.get("system") == Urls.SNOMED) != 1:
                     raise ValueError(
-                        f"{field_location} must contain exactly one element with a system of {Urls.snomed}"
+                        f"{field_location} must contain exactly one element with a system of {Urls.SNOMED}"
                     )
             except KeyError:
                 pass
@@ -719,7 +719,7 @@ def pre_validate_disease_type_coding_codes(self, values: dict) -> None:
         Pre-validate that, if protocolApplied[0].targetDisease[{i}].coding[?(@.system=='http://snomed.info/sct')].code
         exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         try:
             for i in range(len(values["protocolApplied"][0]["targetDisease"])):
                 field_location = f"protocolApplied[0].targetDisease[{i}].coding[?(@.system=='{url}')].code"
@@ -778,7 +778,7 @@ def pre_validate_site_coding_code(self, values: dict) -> None:
         Pre-validate that, if site.coding[?(@.system=='http://snomed.info/sct')].code (legacy CSV field name:
         SITE_OF_VACCINATION_CODE) exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"site.coding[?(@.system=='{url}')].code"
         try:
             site_coding_code = [x for x in values["site"]["coding"] if x.get("system") == url][0]["code"]
@@ -791,7 +791,7 @@ def pre_validate_site_coding_display(self, values: dict) -> None:
         Pre-validate that, if site.coding[?(@.system=='http://snomed.info/sct')].display (legacy CSV field name:
         SITE_OF_VACCINATION_TERM) exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"site.coding[?(@.system=='{url}')].display"
         try:
             field_value = [x for x in values["site"]["coding"] if x.get("system") == url][0]["display"]
@@ -812,7 +812,7 @@ def pre_validate_route_coding_code(self, values: dict) -> None:
         Pre-validate that, if route.coding[?(@.system=='http://snomed.info/sct')].code (legacy CSV field name:
         ROUTE_OF_VACCINATION_CODE) exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"route.coding[?(@.system=='{url}')].code"
         try:
             field_value = [x for x in values["route"]["coding"] if x.get("system") == url][0]["code"]
@@ -825,7 +825,7 @@ def pre_validate_route_coding_display(self, values: dict) -> None:
         Pre-validate that, if route.coding[?(@.system=='http://snomed.info/sct')].display (legacy CSV field name:
         ROUTE_OF_VACCINATION_TERM) exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"route.coding[?(@.system=='{url}')].display"
         try:
             field_value = [x for x in values["route"]["coding"] if x.get("system") == url][0]["display"]
@@ -966,7 +966,7 @@ def pre_validate_vaccine_code(self, values: dict) -> None:
         NOTE: vaccineCode is a mandatory FHIR field. A value of None will be rejected by the FHIR model before
         pre-validators are run.
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"vaccineCode.coding[?(@.system=='{url}')].code"
         try:
             field_value = [x for x in values["vaccineCode"]["coding"] if x.get("system") == url][0]["code"]
@@ -980,7 +980,7 @@ def pre_validate_vaccine_display(self, values: dict) -> None:
         Pre-validate that, if vaccineCode.coding[?(@.system=='http://snomed.info/sct')].display (legacy CSV field
         : VACCINE_PRODUCT_TERM) exists, then it is a non-empty string
         """
-        url = Urls.snomed
+        url = Urls.SNOMED
         field_location = f"vaccineCode.coding[?(@.system=='{url}')].display"
         try:
             field_value = [x for x in values["vaccineCode"]["coding"] if x.get("system") == url][0]["display"]
diff --git a/lambdas/shared/src/common/models/field_locations.py b/lambdas/shared/src/common/models/field_locations.py
index 71d42a6ab..9fb377ef7 100644
--- a/lambdas/shared/src/common/models/field_locations.py
+++ b/lambdas/shared/src/common/models/field_locations.py
@@ -23,7 +23,7 @@ class FieldLocations:
     """

     target_disease = "protocolApplied[0].targetDisease"
-    target_disease_codes = f"protocolApplied[0].targetDisease[0].coding[?(@.system=='{Urls.snomed}')].code"
+    target_disease_codes = f"protocolApplied[0].targetDisease[0].coding[?(@.system=='{Urls.SNOMED}')].code"
     patient_identifier_value = (
         "contained[?(@.resourceType=='Patient')].identifier[0].value"  # TODO: Fix to use nhs number url lookup
     )
@@ -42,20 +42,20 @@ class FieldLocations:
     practitioner_name_family: str = field(init=False)
     recorded = "recorded"
     primary_source = "primarySource"
-    vaccination_procedure_code = generate_field_location_for_extension(Urls.vaccination_procedure, Urls.snomed, "code")
+    vaccination_procedure_code = generate_field_location_for_extension(Urls.VACCINATION_PROCEDURE, Urls.SNOMED, "code")
     vaccination_procedure_display = generate_field_location_for_extension(
-        Urls.vaccination_procedure, Urls.snomed, "display"
+        Urls.VACCINATION_PROCEDURE, Urls.SNOMED, "display"
     )
     dose_number_positive_int = "protocolApplied[0].doseNumberPositiveInt"
-    vaccine_code_coding_code = f"vaccineCode.coding[?(@.system=='{Urls.snomed}')].code"
-    vaccine_code_coding_display = f"vaccineCode.coding[?(@.system=='{Urls.snomed}')].display"
+    vaccine_code_coding_code = f"vaccineCode.coding[?(@.system=='{Urls.SNOMED}')].code"
+    vaccine_code_coding_display = f"vaccineCode.coding[?(@.system=='{Urls.SNOMED}')].display"
     manufacturer_display = "manufacturer.display"
     lot_number = "lotNumber"
     expiration_date = "expirationDate"
-    site_coding_code = f"site.coding[?(@.system=='{Urls.snomed}')].code"
-    site_coding_display = f"site.coding[?(@.system=='{Urls.snomed}')].display"
-    route_coding_code = f"route.coding[?(@.system=='{Urls.snomed}')].code"
-    route_coding_display = f"route.coding[?(@.system=='{Urls.snomed}')].display"
+    site_coding_code = f"site.coding[?(@.system=='{Urls.SNOMED}')].code"
+    site_coding_display = f"site.coding[?(@.system=='{Urls.SNOMED}')].display"
+    route_coding_code = f"route.coding[?(@.system=='{Urls.SNOMED}')].code"
+    route_coding_display = f"route.coding[?(@.system=='{Urls.SNOMED}')].display"
     dose_quantity_value = "doseQuantity.value"
     dose_quantity_code = "doseQuantity.code"
     dose_quantity_unit = "doseQuantity.unit"
diff --git a/lambdas/shared/src/common/models/obtain_field_value.py b/lambdas/shared/src/common/models/obtain_field_value.py
index 3d3ff7e43..4327a4f31 100644
--- a/lambdas/shared/src/common/models/obtain_field_value.py
+++ b/lambdas/shared/src/common/models/obtain_field_value.py
@@ -27,7 +27,7 @@ def patient_identifier_value(imms: dict):
         """Obtains patient_identifier_value value"""
         contained_patient = get_contained_patient(imms)
         contained_patient_identifier = [
-            x for x in contained_patient.get("identifier") if x.get("system") == Urls.nhs_number
+            x for x in contained_patient.get("identifier") if x.get("system") == Urls.NHS_NUMBER
         ][0]
         return contained_patient_identifier["value"]

@@ -134,7 +134,7 @@ def report_origin_text(imms: dict):
     @staticmethod
     def vaccination_procedure_code(imms: dict):
         """Obtains vaccination_procedure_code value"""
-        return get_generic_extension_value(imms, Urls.vaccination_procedure, Urls.snomed, "code")
+        return get_generic_extension_value(imms, Urls.VACCINATION_PROCEDURE, Urls.SNOMED, "code")

     # @staticmethod
     # def vaccination_procedure_display(imms: dict):
diff --git a/lambdas/shared/src/common/models/utils/validation_utils.py b/lambdas/shared/src/common/models/utils/validation_utils.py
index 294aa3bdb..0f2dea2ed 100644
--- a/lambdas/shared/src/common/models/utils/validation_utils.py
+++ b/lambdas/shared/src/common/models/utils/validation_utils.py
@@ -2,7 +2,7 @@

 from fhir.resources.R4B.identifier import Identifier

-from common.models.constants import Constants, Urls
+from common.models.constants import RedisHashKeys, Urls
 from common.models.errors import InconsistentIdentifierError, InconsistentResourceVersionError, MandatoryError
 from common.models.field_names import FieldNames
 from common.models.obtain_field_value import ObtainFieldValue
@@ -26,7 +26,7 @@ def get_target_disease_codes(immunization: dict):
     # For each item in the target disease list, extract the snomed code
     for i, element in enumerate(target_disease):
         try:
-            code = [x["code"] for x in element["coding"] if x.get("system") == Urls.snomed][0]
+            code = [x["code"] for x in element["coding"] if x.get("system") == Urls.SNOMED][0]
         except (KeyError, IndexError) as error:
             raise MandatoryError(
                 f"protocolApplied[0].targetDisease[{i}].coding[?(@.system=='http://snomed.info/sct')].code"
@@ -51,11 +51,11 @@ def convert_disease_codes_to_vaccine_type(
     otherwise raises a value error
     """
     key = ":".join(sorted(disease_codes_input))
-    vaccine_type = get_redis_client().hget(Constants.DISEASES_TO_VACCINE_TYPE_HASH_KEY, key)
+    vaccine_type = get_redis_client().hget(RedisHashKeys.DISEASES_TO_VACCINE_TYPE_HASH_KEY, key)

     if not vaccine_type:
         raise ValueError(
-            f"Validation errors: protocolApplied[0].targetDisease[*].coding[?(@.system=='{Urls.snomed}')].code"
+            f"Validation errors: protocolApplied[0].targetDisease[*].coding[?(@.system=='{Urls.SNOMED}')].code"
             f" - {disease_codes_input} is not a valid combination of disease codes for this service"
         )
     return vaccine_type
diff --git a/sonar-project.properties b/sonar-project.properties
index bba260dd4..6d2749826 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -4,6 +4,7 @@ sonar.organization=nhsdigital
 sonar.host.url=https://sonarcloud.io
 sonar.python.version=3.11
 sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/devtools/**,**/proxies/**,**/utilities/scripts/**,**/infrastructure/account/**,**/infrastructure/instance/**,**/infrastructure/grafana/**,**/terraform_aws_backup/**,**/tests/**
+sonar.coverage.exclusions=lambdas/shared/src/common/models/batch_constants.py
 sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml
 sonar.cpd.exclusions=**/Dockerfile
 sonar.issue.ignore.multicriteria=exclude_snomed_urls,exclude_hl7_urls
@@ -13,4 +14,4 @@ sonar.issue.ignore.multicriteria.exclude_hl7_urls.ruleKey=python:S5332
 sonar.issue.ignore.multicriteria.exclude_hl7_urls.resourceKey=**http://terminology\.hl7\.org/CodeSystem/v3-NullFlavor**
 sonar.issue.ignore.multicriteria=e1
 sonar.issue.ignore.multicriteria.e1.ruleKey=python:S5443
-sonar.issue.ignore.multicriteria.e1.resourceKey=**/mns_setup.py
\ No newline at end of file
+sonar.issue.ignore.multicriteria.e1.resourceKey=**/mns_setup.py