Skip to content

Commit 86ac22d

Browse files
committed
fix username validation
1 parent 3a0d6a8 commit 86ac22d

2 files changed

Lines changed: 116 additions & 73 deletions

File tree

tests/e2e_automation/features/batchTests/Steps/test_update_batch_steps.py

Lines changed: 51 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,11 @@
33
import pandas as pd
44
from pytest_bdd import given, scenarios, then, when
55
from src.objectModels.batch.batch_file_builder import build_batch_file
6-
from utilities.batch_file_helper import read_and_validate_csv_bus_ack_file_content
6+
from utilities.batch_file_helper import (
7+
read_and_validate_csv_bus_ack_file_content,
8+
validate_json_bus_ack_file_failure_records,
9+
validate_json_bus_ack_file_structure_and_metadata,
10+
)
711
from utilities.enums import GenderCode
812
from utilities.error_constants import ERROR_MAP
913

@@ -27,13 +31,14 @@
2731
from .batch_common_steps import (
2832
build_dataFrame_using_datatable,
2933
create_batch_file,
30-
json_bus_ack_will_only_contain_file_metadata_and_correct_failure_record_entries,
3134
)
3235

3336
scenarios("batchTests/update_batch.feature")
3437

3538

36-
@given("batch file is created for below data as full dataset and each record has a valid update record in the same file")
39+
@given(
40+
"batch file is created for below data as full dataset and each record has a valid update record in the same file"
41+
)
3742
def valid_batch_file_is_created_with_details(datatable, context):
3843
build_dataFrame_using_datatable(datatable, context)
3944
df_new = context.vaccine_df.copy()
@@ -55,7 +60,9 @@ def create_valid_vaccination_record_through_api(context):
5560
)
5661
def create_valid_vaccination_record_with_same_unique_id_as_batch_file(context):
5762
valid_json_payload_is_created(context)
58-
context.immunization_object.identifier[0].value = f"Fail-duplicate{str(uuid.uuid4())}-duplicate"
63+
context.immunization_object.identifier[0].value = (
64+
f"Fail-duplicate{str(uuid.uuid4())}-duplicate"
65+
)
5966
Trigger_the_post_create_request(context)
6067
The_request_will_have_status_code(context, 201)
6168
validateCreateLocation(context)
@@ -94,14 +101,20 @@ def upload_batch_file_to_s3_for_update(context):
94101
create_batch_file(context)
95102

96103

97-
@then("The delta and imms event table will be populated with the correct data for api created event")
98-
@given("The delta and imms event table will be populated with the correct data for api created event")
104+
@then(
105+
"The delta and imms event table will be populated with the correct data for api created event"
106+
)
107+
@given(
108+
"The delta and imms event table will be populated with the correct data for api created event"
109+
)
99110
def validate_imms_delta_table_for_api_created_event(context):
100111
validate_imms_event_table_by_operation(context, "created")
101112
validate_imms_delta_table_by_ImmsID(context)
102113

103114

104-
@when("Send a update for Immunization event created with vaccination detail being updated through API request")
115+
@when(
116+
"Send a update for Immunization event created with vaccination detail being updated through API request"
117+
)
105118
def send_update_for_immunization_event_with_vaccination_detail_updated(context):
106119
valid_json_payload_is_created(context)
107120
row = context.vaccine_df.loc[0]
@@ -110,11 +123,15 @@ def send_update_for_immunization_event_with_vaccination_detail_updated(context):
110123
context.immunization_object.contained[1].name[0].family = row["PERSON_SURNAME"]
111124
reverse_gender_map = {v.value: v.name for v in GenderCode}
112125
code = row["PERSON_GENDER_CODE"]
113-
context.immunization_object.contained[1].gender = reverse_gender_map.get(code, "unknown")
114-
context.immunization_object.contained[
115-
1
116-
].birthDate = f"{row['PERSON_DOB'][:4]}-{row['PERSON_DOB'][4:6]}-{row['PERSON_DOB'][6:]}"
117-
context.immunization_object.contained[1].address[0].postalCode = row["PERSON_POSTCODE"]
126+
context.immunization_object.contained[1].gender = reverse_gender_map.get(
127+
code, "unknown"
128+
)
129+
context.immunization_object.contained[1].birthDate = (
130+
f"{row['PERSON_DOB'][:4]}-{row['PERSON_DOB'][4:6]}-{row['PERSON_DOB'][6:]}"
131+
)
132+
context.immunization_object.contained[1].address[0].postalCode = row[
133+
"PERSON_POSTCODE"
134+
]
118135
context.immunization_object.identifier[0].value = row["UNIQUE_ID"]
119136
context.immunization_object.identifier[0].system = row["UNIQUE_ID_URI"]
120137
send_update_for_immunization_event(context)
@@ -128,7 +145,9 @@ def api_request_will_be_successful_and_tables_will_be_updated_correctly(context)
128145
validate_delta_table_for_updated_event(context)
129146

130147

131-
@when("Update to above vaccination record is made through batch file upload with mandatory field missing")
148+
@when(
149+
"Update to above vaccination record is made through batch file upload with mandatory field missing"
150+
)
132151
def upload_batch_file_to_s3_for_update_with_mandatory_field_missing(context):
133152
# Build base record
134153
record = build_batch_file(context)
@@ -145,7 +164,9 @@ def upload_batch_file_to_s3_for_update_with_mandatory_field_missing(context):
145164
"UNIQUE_ID_URI": context.create_object.identifier[0].system,
146165
}
147166
context.vaccine_df.loc[0, list(base_fields.keys())] = list(base_fields.values())
148-
context.vaccine_df = pd.concat([context.vaccine_df.loc[[0]]] * 19, ignore_index=True)
167+
context.vaccine_df = pd.concat(
168+
[context.vaccine_df.loc[[0]]] * 19, ignore_index=True
169+
)
149170
missing_cases = {
150171
0: {"SITE_CODE": "", "PERSON_SURNAME": "empty_site_code"},
151172
1: {"SITE_CODE_TYPE_URI": "", "PERSON_SURNAME": "empty_site_code_uri"},
@@ -177,16 +198,26 @@ def upload_batch_file_to_s3_for_update_with_mandatory_field_missing(context):
177198
create_batch_file(context)
178199

179200

180-
@then("csv bus ack will have error records for all the updated records in the batch file")
201+
@then(
202+
"csv bus ack will have error records for all the updated records in the batch file"
203+
)
181204
def all_records_are_processed_successfully_in_the_batch_file(context):
182205
file_rows = read_and_validate_csv_bus_ack_file_content(context, False, True)
183206
all_valid = validate_bus_ack_file_for_error_by_surname(context, file_rows)
184207
assert all_valid, "One or more records failed validation checks"
185208

186209

187-
@then("json bus ack will have error records for all the updated records in the batch file")
210+
@then(
211+
"json bus ack will have error records for all the updated records in the batch file"
212+
)
188213
def json_bus_ack_will_have_error_records_for_all_updated_records_in_batch_file(context):
189-
json_bus_ack_will_only_contain_file_metadata_and_correct_failure_record_entries(context)
214+
json_content = context.fileContentJson
215+
assert json_content is not None, "BUS Ack JSON content is None"
216+
validate_json_bus_ack_file_structure_and_metadata(context)
217+
success = validate_json_bus_ack_file_failure_records(
218+
context, expected_failure=True, use_username_for_error_lookup=True
219+
)
220+
assert success, "Failed to validate JSON bus ack file failure records"
190221

191222

192223
def validate_bus_ack_file_for_error_by_surname(context, file_rows) -> bool:
@@ -198,7 +229,9 @@ def validate_bus_ack_file_for_error_by_surname(context, file_rows) -> bool:
198229
bus_ack_row_number = batch_idx + 2
199230
row_data_list = file_rows.get(bus_ack_row_number)
200231
if not row_data_list:
201-
print(f"Batch row {batch_idx}: No BUS ACK entry found for row number {bus_ack_row_number}")
232+
print(
233+
f"Batch row {batch_idx}: No BUS ACK entry found for row number {bus_ack_row_number}"
234+
)
202235
overall_valid = False
203236
continue
204237
surname = str(row.get("PERSON_SURNAME", "")).strip()

tests/e2e_automation/utilities/batch_file_helper.py

Lines changed: 65 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,6 @@ def validate_bus_ack_file_for_error(context, file_rows) -> bool:
189189

190190

191191
def read_and_validate_csv_bus_ack_file_content(context, by_local_id: bool = True, by_row_number: bool = False) -> dict:
192-
# Prevent invalid combinations
193192
if by_local_id and by_row_number:
194193
raise ValueError("Choose only one mode: by_local_id OR by_row_number")
195194

@@ -291,78 +290,89 @@ def validate_json_bus_ack_file_structure_and_metadata(context):
291290
)
292291

293292

294-
def validate_json_bus_ack_file_failure_records(context, expected_failure: bool = True):
293+
def validate_json_bus_ack_file_failure_records(
294+
context, expected_failure: bool = True, use_username_for_error_lookup: bool = False
295+
):
295296
data = json.loads(context.fileContentJson)
296297
report = BatchReport(**data)
297298
failures = report.failures or []
299+
298300
if not expected_failure:
299-
if not failures or len(failures) == 0:
301+
if not failures:
300302
return True
301-
else:
302-
print(f"Found {len(failures)} failure records in BUS ACK file as not expected")
303-
return False
304-
else:
305-
fail_mask = context.vaccine_df["UNIQUE_ID"].str.startswith("Fail-", na=False) | (
306-
context.vaccine_df["UNIQUE_ID"].str.strip() == ""
307-
)
308-
fail_df = context.vaccine_df[fail_mask]
303+
print(f"Found {len(failures)} failure records in BUS ACK file as not expected")
304+
return False
309305

310-
# Build expected localId values
311-
expected_local_ids = set(fail_df["UNIQUE_ID"].astype(str) + "^" + fail_df["UNIQUE_ID_URI"].astype(str))
306+
fail_mask = context.vaccine_df["UNIQUE_ID"].str.startswith("Fail-", na=False) | (
307+
context.vaccine_df["UNIQUE_ID"].str.strip() == ""
308+
)
309+
fail_df = context.vaccine_df[fail_mask]
312310

313-
overall_valid = True
311+
expected_local_ids = set(fail_df["UNIQUE_ID"].astype(str) + "^" + fail_df["UNIQUE_ID_URI"].astype(str))
314312

315-
for failure in failures:
316-
row_valid = True
313+
overall_valid = True
317314

318-
row_id = failure.rowId
319-
response_code = failure.responseCode
320-
response_display = failure.responseDisplay
321-
severity = failure.severity
322-
local_id = failure.localId
323-
operation_outcome = failure.operationOutcome
315+
for failure in failures:
316+
row_valid = True
324317

325-
# --- Validate localId exists ---
326-
if local_id not in expected_local_ids:
327-
print(f"Failure rowId {row_id}: localId '{local_id}' not expected")
328-
row_valid = False
318+
row_id = failure.rowId
319+
local_id = failure.localId
320+
operation_outcome = failure.operationOutcome
329321

330-
# --- Validate fixed fields ---
331-
if response_code != "30002":
332-
print(f"Failure rowId {row_id}: responseCode != '30002'")
333-
row_valid = False
322+
if local_id not in expected_local_ids:
323+
print(f"Failure rowId {row_id}: localId '{local_id}' not expected")
324+
row_valid = False
334325

335-
if response_display != "Business Level Response Value - Processing Error":
336-
print(f"Failure rowId {row_id}: responseDisplay incorrect")
337-
row_valid = False
326+
if failure.responseCode != "30002":
327+
print(f"Failure rowId {row_id}: responseCode != '30002'")
328+
row_valid = False
338329

339-
if severity != "Fatal":
340-
print(f"Failure rowId {row_id}: severity != 'Fatal'")
341-
row_valid = False
330+
if failure.responseDisplay != "Business Level Response Value - Processing Error":
331+
print(f"Failure rowId {row_id}: responseDisplay incorrect")
332+
row_valid = False
342333

343-
try:
344-
df_row = context.vaccine_df.loc[row_id - 2]
345-
prefix = str(df_row["UNIQUE_ID"]).strip()
334+
if failure.severity != "Fatal":
335+
print(f"Failure rowId {row_id}: severity != 'Fatal'")
336+
row_valid = False
346337

347-
if prefix in ["", " ", "nan"]:
348-
expected_error = df_row["PERSON_SURNAME"]
349-
else:
350-
parts = prefix.split("-")
351-
expected_error = parts[2] if len(parts) > 2 else "invalid_prefix_format"
338+
try:
339+
df_row = context.vaccine_df.loc[row_id - 2]
340+
expected_error = get_expected_error(df_row, use_username_for_error_lookup)
352341

353-
expected_diagnostic = ERROR_MAP.get(expected_error, {}).get("diagnostics")
342+
expected_diagnostic = ERROR_MAP.get(expected_error, {}).get("diagnostics")
354343

355-
if operation_outcome != expected_diagnostic:
356-
print(
357-
f"Failure rowId {row_id}: operationOutcome mismatch. "
358-
f"Expected '{expected_diagnostic}', got '{operation_outcome}'"
359-
)
360-
row_valid = False
344+
# Duplicate case
345+
if expected_error == "duplicate" and expected_diagnostic:
346+
expected_diagnostic = expected_diagnostic.replace(
347+
"<identifier>",
348+
f"{context.immunization_object.identifier[0].system}#"
349+
f"{context.immunization_object.identifier[0].value}",
350+
)
361351

362-
except Exception as e:
363-
print(f"Failure rowId {row_id}: error resolving expected diagnostics: {e}")
352+
if operation_outcome != expected_diagnostic:
353+
print(
354+
f"Failure rowId {row_id}: operationOutcome mismatch. "
355+
f"Expected '{expected_diagnostic}', got '{operation_outcome}'"
356+
)
364357
row_valid = False
365358

366-
overall_valid = overall_valid and row_valid
359+
except Exception as e:
360+
print(f"Failure rowId {row_id}: error resolving expected diagnostics: {e}")
361+
row_valid = False
362+
363+
overall_valid = overall_valid and row_valid
364+
365+
return overall_valid
366+
367+
368+
def get_expected_error(df_row, use_surname: bool):
369+
prefix = str(df_row["UNIQUE_ID"]).strip()
370+
371+
if prefix in ["", " ", "nan"]:
372+
return df_row.get("PERSON_SURNAME", "").strip()
373+
374+
if use_surname:
375+
return str(df_row.get("PERSON_SURNAME", "")).strip()
367376

368-
return overall_valid
377+
parts = prefix.split("-")
378+
return parts[2] if len(parts) > 2 else "invalid_prefix_format"

0 commit comments

Comments
 (0)