Skip to content

Commit b1f9fc3

Browse files
edhall-nhs and motola authored
VED-881: refactor constants for batch lambdas (#1096)
* Consolidate filenameprocessor consts * Consolidate batch_processor_filer consts * Consolidate recordprocessor consts * Consolidate ack_backend consts * Consolidate batch_processor_filter env var consts * Consolidate filenameprocessor env var consts * Consolidate recordprocessor env var consts * Consolidate ack_backend env var consts * Self review * Address review comments * Self review round 2 * Add sonar coverage exclusion for batch_constants --------- Co-authored-by: Akinola Olutola <akinola.olutola1@nhs.net>
1 parent c4c1649 commit b1f9fc3

51 files changed

Lines changed: 192 additions & 329 deletions

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

lambdas/ack_backend/src/audit_table.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
"""Add the filename to the audit table and check for duplicates."""
22

33
from common.clients import dynamodb_client, logger
4+
from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
45
from common.models.errors import UnhandledAuditTableError
5-
from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
66

77
CONDITION_EXPRESSION = "attribute_exists(message_id)"
88

lambdas/ack_backend/src/constants.py

Lines changed: 0 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,48 +1,11 @@
11
"""Constants for ack lambda"""
22

3-
import os
4-
5-
AUDIT_TABLE_NAME = os.getenv("AUDIT_TABLE_NAME")
6-
73
COMPLETED_ACK_DIR = "forwardedFile"
84
TEMP_ACK_DIR = "TempAck"
95
BATCH_FILE_PROCESSING_DIR = "processing"
106
BATCH_FILE_ARCHIVE_DIR = "archive"
117

128

13-
def get_source_bucket_name() -> str:
14-
"""Get the SOURCE_BUCKET_NAME environment from environment variables."""
15-
return os.getenv("SOURCE_BUCKET_NAME")
16-
17-
18-
def get_ack_bucket_name() -> str:
19-
"""Get the ACK_BUCKET_NAME environment from environment variables."""
20-
return os.getenv("ACK_BUCKET_NAME")
21-
22-
23-
class FileStatus:
24-
"""File status constants"""
25-
26-
QUEUED = "Queued"
27-
PROCESSING = "Processing"
28-
PROCESSED = "Processed"
29-
DUPLICATE = "Duplicate"
30-
31-
32-
class AuditTableKeys:
33-
"""Audit table keys"""
34-
35-
FILENAME = "filename"
36-
MESSAGE_ID = "message_id"
37-
QUEUE_NAME = "queue_name"
38-
RECORD_COUNT = "record_count"
39-
STATUS = "status"
40-
TIMESTAMP = "timestamp"
41-
INGESTION_END_TIME = "ingestion_end_time"
42-
RECORDS_SUCCEEDED = "records_succeeded"
43-
RECORDS_FAILED = "records_failed"
44-
45-
469
ACK_HEADERS = [
4710
"MESSAGE_HEADER_ID",
4811
"HEADER_RESPONSE_CODE",

lambdas/ack_backend/src/update_ack_file.py

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,14 +12,13 @@
1212
)
1313
from common.aws_s3_utils import move_file
1414
from common.clients import get_s3_client, logger
15+
from common.models.batch_constants import ACK_BUCKET_NAME, SOURCE_BUCKET_NAME
1516
from constants import (
1617
ACK_HEADERS,
1718
BATCH_FILE_ARCHIVE_DIR,
1819
BATCH_FILE_PROCESSING_DIR,
1920
COMPLETED_ACK_DIR,
2021
TEMP_ACK_DIR,
21-
get_ack_bucket_name,
22-
get_source_bucket_name,
2322
)
2423
from logging_decorators import complete_batch_file_process_logging_decorator
2524

@@ -71,10 +70,8 @@ def complete_batch_file_process(
7170
the audit table status"""
7271
ack_filename = f"{file_key.replace('.csv', f'_BusAck_{created_at_formatted_string}.csv')}"
7372

74-
move_file(get_ack_bucket_name(), f"{TEMP_ACK_DIR}/{ack_filename}", f"{COMPLETED_ACK_DIR}/{ack_filename}")
75-
move_file(
76-
get_source_bucket_name(), f"{BATCH_FILE_PROCESSING_DIR}/{file_key}", f"{BATCH_FILE_ARCHIVE_DIR}/{file_key}"
77-
)
73+
move_file(ACK_BUCKET_NAME, f"{TEMP_ACK_DIR}/{ack_filename}", f"{COMPLETED_ACK_DIR}/{ack_filename}")
74+
move_file(SOURCE_BUCKET_NAME, f"{BATCH_FILE_PROCESSING_DIR}/{file_key}", f"{BATCH_FILE_ARCHIVE_DIR}/{file_key}")
7875

7976
total_ack_rows_processed, total_failures = get_record_count_and_failures_by_message_id(message_id)
8077
change_audit_table_status_to_processed(file_key, message_id)
@@ -99,7 +96,7 @@ def obtain_current_ack_content(temp_ack_file_key: str) -> StringIO:
9996
"""Returns the current ack file content if the file exists, or else initialises the content with the ack headers."""
10097
try:
10198
# If ack file exists in S3 download the contents
102-
existing_ack_file = get_s3_client().get_object(Bucket=get_ack_bucket_name(), Key=temp_ack_file_key)
99+
existing_ack_file = get_s3_client().get_object(Bucket=ACK_BUCKET_NAME, Key=temp_ack_file_key)
103100
existing_content = existing_ack_file["Body"].read().decode("utf-8")
104101
except ClientError as error:
105102
# If ack file does not exist in S3 create a new file containing the headers only
@@ -132,7 +129,6 @@ def update_ack_file(
132129
accumulated_csv_content.write(cleaned_row + "\n")
133130

134131
csv_file_like_object = BytesIO(accumulated_csv_content.getvalue().encode("utf-8"))
135-
ack_bucket_name = get_ack_bucket_name()
136132

137-
get_s3_client().upload_fileobj(csv_file_like_object, ack_bucket_name, temp_ack_file_key)
138-
logger.info("Ack file updated to %s: %s", ack_bucket_name, completed_ack_file_key)
133+
get_s3_client().upload_fileobj(csv_file_like_object, ACK_BUCKET_NAME, temp_ack_file_key)
134+
logger.info("Ack file updated to %s: %s", ACK_BUCKET_NAME, completed_ack_file_key)

lambdas/ack_backend/tests/test_ack_processor.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,12 @@ def setUp(self) -> None:
6363
self.logger_info_patcher = patch("common.log_decorator.logger.info")
6464
self.mock_logger_info = self.logger_info_patcher.start()
6565

66+
self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
67+
self.ack_bucket_patcher.start()
68+
69+
self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", BucketNames.SOURCE)
70+
self.source_bucket_patcher.start()
71+
6672
def tearDown(self) -> None:
6773
GenericTearDown(self.s3_client, self.firehose_client, self.dynamodb_client)
6874
self.mock_logger_info.stop()

lambdas/ack_backend/tests/test_audit_table.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22
from unittest.mock import call, patch
33

44
import audit_table
5+
from common.models.batch_constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
56
from common.models.errors import UnhandledAuditTableError
6-
from constants import AUDIT_TABLE_NAME, AuditTableKeys, FileStatus
77

88

99
class TestAuditTable(unittest.TestCase):

lambdas/ack_backend/tests/test_logging_decorators.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from unittest.mock import patch
33

44
import logging_decorators
5+
from utils.mock_environment_variables import BucketNames
56

67

78
class TestLoggingDecorators(unittest.TestCase):
@@ -12,6 +13,9 @@ def setUp(self):
1213
self.firehose_patcher = patch("common.log_firehose.firehose_client")
1314
self.mock_firehose = self.firehose_patcher.start()
1415

16+
self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", BucketNames.SOURCE)
17+
self.source_bucket_patcher.start()
18+
1519
def tearDown(self):
1620
self.logger_patcher.stop()
1721
self.firehose_patcher.stop()

lambdas/ack_backend/tests/test_splunk_logging.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,9 @@ def setUp(self):
4545
Body=mock_source_file_with_100_rows.getvalue(),
4646
)
4747

48+
self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
49+
self.ack_bucket_patcher.start()
50+
4851
def tearDown(self):
4952
GenericTearDown(self.s3_client)
5053

lambdas/ack_backend/tests/test_update_ack_file.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,9 @@ def setUp(self) -> None:
5757
self.logger_patcher = patch("update_ack_file.logger")
5858
self.mock_logger = self.logger_patcher.start()
5959

60+
self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", BucketNames.DESTINATION)
61+
self.ack_bucket_patcher.start()
62+
6063
def tearDown(self) -> None:
6164
GenericTearDown(self.s3_client)
6265

lambdas/ack_backend/tests/test_update_ack_file_flow.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,23 +5,21 @@
55
from moto import mock_aws
66

77
import update_ack_file
8+
from utils.mock_environment_variables import BucketNames
89

910

1011
@mock_aws
1112
class TestUpdateAckFileFlow(unittest.TestCase):
1213
def setUp(self):
1314
self.s3_client = boto3.client("s3", region_name="eu-west-2")
1415

15-
self.ack_bucket_name = "my-ack-bucket"
16-
self.source_bucket_name = "my-source-bucket"
17-
self.ack_bucket_patcher = patch("update_ack_file.get_ack_bucket_name", return_value=self.ack_bucket_name)
18-
self.mock_get_ack_bucket_name = self.ack_bucket_patcher.start()
16+
self.ack_bucket_name = BucketNames.DESTINATION
17+
self.source_bucket_name = BucketNames.SOURCE
18+
self.ack_bucket_patcher = patch("update_ack_file.ACK_BUCKET_NAME", self.ack_bucket_name)
19+
self.ack_bucket_patcher.start()
1920

20-
self.source_bucket_patcher = patch(
21-
"update_ack_file.get_source_bucket_name",
22-
return_value=self.source_bucket_name,
23-
)
24-
self.mock_get_source_bucket_name = self.source_bucket_patcher.start()
21+
self.source_bucket_patcher = patch("update_ack_file.SOURCE_BUCKET_NAME", self.source_bucket_name)
22+
self.source_bucket_patcher.start()
2523

2624
self.s3_client.create_bucket(
2725
Bucket=self.ack_bucket_name,

lambdas/ack_backend/tests/utils/generic_setup_and_teardown_for_ack_backend.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""Generic setup and teardown for ACK backend tests"""
22

3-
from constants import AuditTableKeys
3+
from common.models.batch_constants import AuditTableKeys
44
from tests.utils.mock_environment_variables import AUDIT_TABLE_NAME, REGION_NAME, BucketNames, Firehose
55

66

0 commit comments

Comments (0)