Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .github/workflows/deploy-backend.yml
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,10 @@ jobs:
working-directory: infrastructure/instance
run: make init

- name: Set Terraform workspace
working-directory: infrastructure/instance
run: make workspace

- name: Terraform Plan
# Ignore cancellations to prevent Terraform from being killed while it holds a state lock
# A stuck process can still be killed with the force-cancel API operation
Expand Down Expand Up @@ -293,6 +297,10 @@ jobs:
working-directory: infrastructure/instance
run: make init

- name: Set Terraform workspace
working-directory: infrastructure/instance
run: make workspace

- name: Terraform Apply
# Ignore cancellations to prevent Terraform from being killed while it holds a state lock
# A stuck process can still be killed with the force-cancel API operation
Expand All @@ -302,6 +310,14 @@ jobs:
make apply-ci
echo "ID_SYNC_QUEUE_ARN=$(make -s output name=id_sync_queue_arn)" >> $GITHUB_ENV

- name: Apply Lambda event source mappings
if: ${{ !failure() }}
working-directory: infrastructure/event_source_mappings
run: |
make init
make adopt
make apply

- name: Install poetry
if: ${{ inputs.environment == 'dev' && inputs.create_mns_subscription }}
run: pip install poetry==2.1.4
Expand Down
6 changes: 6 additions & 0 deletions .github/workflows/pr-teardown.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,12 @@ jobs:
echo "Unsubscribing SQS to MNS for notifications..."
make unsubscribe

- name: Destroy Lambda event source mappings
working-directory: infrastructure/event_source_mappings
run: |
make init apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT
make destroy apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT

- name: Terraform Destroy
working-directory: infrastructure/instance
run: |
Expand Down
25 changes: 25 additions & 0 deletions infrastructure/event_source_mappings/.terraform.lock.hcl

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

46 changes: 46 additions & 0 deletions infrastructure/event_source_mappings/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# Makefile for the standalone event-source-mappings Terraform stack.
# Loads local developer overrides (AWS_PROFILE, ENVIRONMENT, ...) if present.
-include .env

# CLI-overridable inputs (make target environment=dev ...); fall back to the
# environment variables exported by .env or the CI workflow.
apigee_environment ?= $(APIGEE_ENVIRONMENT)
environment ?= $(ENVIRONMENT)
sub_environment ?= $(SUB_ENVIRONMENT)

# PR sub-environments (pr-123, ...) all share the "pr" variables directory.
sub_environment_dir := $(if $(findstring pr-,$(sub_environment)),pr,$(sub_environment))
tf_var_file := ../instance/environments/$(environment)/$(sub_environment_dir)/variables.tfvars

# Read has_sub_environment_scope out of the tfvars file. ':=' so the awk/shell
# runs exactly once at parse time instead of on every expansion.
has_sub_environment_scope := $(shell awk -F= '/^has_sub_environment_scope/ { gsub(/[[:space:]]/, "", $$2); print $$2 }' "$(tf_var_file)")

# Shared (blue/green) stacks declare has_sub_environment_scope = false and use
# the environment-level workspace; everything else is scoped per sub-environment.
workspace_name := $(if $(filter false,$(has_sub_environment_scope)),$(environment),$(sub_environment))

tf_cmd = AWS_PROFILE=$(AWS_PROFILE) terraform

# dev accounts keep per-apigee-environment state buckets; other accounts use a
# single per-environment bucket.
bucket_name = $(if $(filter dev,$(environment)),immunisation-$(apigee_environment),immunisation-$(environment))-terraform-state-files

tf_state = \
    -backend-config="bucket=$(bucket_name)" \
    -backend-config="key=event-source-mappings/state"

tf_vars = \
    -var="sub_environment=$(sub_environment)" \
    -var-file="$(tf_var_file)"

init:
	$(tf_cmd) init $(tf_state) -upgrade

workspace:
	$(tf_cmd) workspace select -or-create $(workspace_name) && echo "Switched to workspace/environment: $(workspace_name)"

adopt: workspace
	ENVIRONMENT='$(environment)' SUB_ENVIRONMENT='$(sub_environment)' RESOURCE_SCOPE='$(workspace_name)' bash ../../utilities/scripts/adopt_event_source_mappings.sh $(tf_vars)

plan: workspace
	$(tf_cmd) plan $(tf_vars)

apply: workspace
	$(tf_cmd) apply $(tf_vars) -auto-approve

destroy: workspace
	$(tf_cmd) destroy $(tf_vars) -auto-approve
	$(tf_cmd) workspace select default
	$(tf_cmd) workspace delete $(workspace_name)

output:
	$(tf_cmd) output -raw $(name)

.PHONY: init workspace adopt plan apply destroy output
78 changes: 78 additions & 0 deletions infrastructure/event_source_mappings/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# Standalone stack owning the two Lambda event source mappings (DynamoDB
# stream -> delta lambda, SQS -> id-sync lambda). Backend bucket is injected
# at init time by the Makefile via -backend-config.
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 6"
    }
  }
  backend "s3" {
    region       = "eu-west-2"
    key          = "event-source-mappings/state"
    use_lockfile = true
  }
  required_version = ">= 1.5.0"
}

provider "aws" {
  region = var.aws_region
  default_tags {
    tags = {
      Project     = var.project_name
      Environment = local.resource_scope
      Service     = var.service
    }
  }
}

locals {
  # Shared (blue/green) stacks tag/scope by environment; otherwise by
  # sub-environment. Mirrors workspace selection in the Makefile.
  resource_scope      = var.has_sub_environment_scope ? var.sub_environment : var.environment
  short_prefix        = "${var.project_short_name}-${var.sub_environment}"
  events_table_name   = "imms-${local.resource_scope}-imms-events"
  id_sync_queue_name  = "imms-${local.resource_scope}-id-sync-queue"
  delta_lambda_name   = "${local.short_prefix}-delta-lambda"
  delta_dlq_name      = "${local.short_prefix}-delta-dlq"
  id_sync_lambda_name = "${local.short_prefix}-id-sync-lambda"
}

# All referenced resources are looked up, not created here — assumes the
# ../instance stack has already been applied to this account/scope.
data "aws_dynamodb_table" "events" {
  name = local.events_table_name
}

data "aws_sqs_queue" "delta_dlq" {
  name = local.delta_dlq_name
}

data "aws_sqs_queue" "id_sync" {
  name = local.id_sync_queue_name
}

data "aws_lambda_function" "delta" {
  function_name = local.delta_lambda_name
}

data "aws_lambda_function" "id_sync" {
  function_name = local.id_sync_lambda_name
}

# DynamoDB stream -> delta lambda. No retries; failed batches go straight to
# the delta DLQ. NOTE(review): data.aws_dynamodb_table.events.stream_arn is
# only set if the table has streams enabled in ../instance — confirm.
resource "aws_lambda_event_source_mapping" "delta_trigger" {
  event_source_arn  = data.aws_dynamodb_table.events.stream_arn
  function_name     = data.aws_lambda_function.delta.function_name
  starting_position = "TRIM_HORIZON"

  destination_config {
    on_failure {
      destination_arn = data.aws_sqs_queue.delta_dlq.arn
    }
  }

  maximum_retry_attempts = 0
}

# SQS -> id-sync lambda; partial-batch failure reporting enabled so only the
# failed records are returned to the queue.
resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
  event_source_arn = data.aws_sqs_queue.id_sync.arn
  function_name    = data.aws_lambda_function.id_sync.arn

  batch_size                         = 10
  maximum_batching_window_in_seconds = 5
  function_response_types            = ["ReportBatchItemFailures"]
}
4 changes: 4 additions & 0 deletions infrastructure/event_source_mappings/outputs.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Exposed so the deploy workflow can read the queue ARN
# (make -s output name=id_sync_queue_arn) after apply.
output "id_sync_queue_arn" {
  description = "The ARN of the ID Sync (MNS NHS Number change) SQS queue"
  value       = data.aws_sqs_queue.id_sync.arn
}
97 changes: 97 additions & 0 deletions infrastructure/event_source_mappings/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
variable "environment" {
  type        = string
  description = "Environment (AWS Account) name - dev, preprod or prod"
}

variable "sub_environment" {
  type        = string
  description = "Sub-environment name, e.g. internal-dev, int-blue, blue"
}

variable "has_sub_environment_scope" {
  description = "True if resources are scoped to the sub-environment. False for blue/green shared resources."
  type        = bool
  default     = false
}

variable "project_name" {
  type    = string
  default = "immunisation"
}

variable "project_short_name" {
  type    = string
  default = "imms"
}

variable "service" {
  type    = string
  default = "fhir-api"
}

variable "aws_region" {
  type    = string
  default = "eu-west-2"

  # Deployment is pinned to eu-west-2; the validation makes accidental
  # overrides fail fast.
  validation {
    condition     = var.aws_region == "eu-west-2"
    error_message = "AWS Region must be set to eu-west-2."
  }
}

# NOTE(review): the variables below are not referenced by main.tf/outputs.tf
# in this stack — they appear to be declared only so the shared
# ../instance/environments/**/variables.tfvars files can be passed via
# -var-file without "undeclared variable" warnings. Confirm and prune if not.
variable "immunisation_account_id" {
  type        = string
  description = "Immunisation AWS Account ID"
}

variable "dspp_core_account_id" {
  type        = string
  description = "DSPP Core AWS Account ID"
}

variable "mns_account_id" {
  type        = string
  description = "MNS AWS account ID - trusted source for MNS notifications"
  default     = "631615744739"
}

variable "pds_environment" {
  type    = string
  default = "int"
}

variable "mns_environment" {
  type    = string
  default = "int"
}

variable "error_alarm_notifications_enabled" {
  default     = true
  description = "Switch to enable error alarm notifications to Slack"
  type        = bool
}

variable "create_mesh_processor" {
  type    = bool
  default = false
}

variable "mesh_no_invocation_period_seconds" {
  type    = number
  default = 300
}

variable "dspp_submission_s3_bucket_name" {
  type    = string
  default = "nhsd-dspp-core-ref-s3-submission-upload"
}

variable "dspp_submission_kms_key_alias" {
  type    = string
  default = "nhsd-dspp-core-ref-s3-submission-upload-key"
}

variable "dynamodb_point_in_time_recovery_enabled" {
  type    = bool
  default = false
}
4 changes: 4 additions & 0 deletions infrastructure/instance/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,7 @@ Note: If you switch environment configuration in .env ensure that you run `make

If you want to apply Terraform to a workspace created by a PR, set SUB_ENVIRONMENT above to the PR number (e.g. `pr-57`) and ENVIRONMENT to `dev`. You can use this to test out changes locally when tests fail in CI.

## Lambda Trigger Handoff

The `delta_trigger` and `id_sync_sqs_trigger` event source mappings are managed from `../event_source_mappings`, so the main instance plan does not rewrite shared backend state. The deploy workflow applies the main instance stack first, then adopts (imports) and applies the trigger mappings from the dedicated trigger workspace.
14 changes: 0 additions & 14 deletions infrastructure/instance/delta.tf
Original file line number Diff line number Diff line change
Expand Up @@ -70,20 +70,6 @@ resource "aws_lambda_function" "delta_sync_lambda" {
]
}


resource "aws_lambda_event_source_mapping" "delta_trigger" {
event_source_arn = aws_dynamodb_table.events-dynamodb-table.stream_arn
function_name = aws_lambda_function.delta_sync_lambda.function_name
starting_position = "TRIM_HORIZON"
destination_config {
on_failure {
destination_arn = aws_sqs_queue.dlq.arn
}
}
maximum_retry_attempts = 0
}


resource "aws_sqs_queue" "dlq" {
name = "${local.short_prefix}-${local.dlq_name}"
}
Expand Down
17 changes: 0 additions & 17 deletions infrastructure/instance/id_sync_lambda.tf
Original file line number Diff line number Diff line change
Expand Up @@ -258,20 +258,3 @@ resource "aws_cloudwatch_metric_alarm" "id_sync_error_alarm" {
alarm_actions = [data.aws_sns_topic.imms_system_alert_errors.arn]
treat_missing_data = "notBreaching"
}



# delete config_lambda_notification / new_s3_invoke_permission - not required; duplicate

# NEW
resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
event_source_arn = aws_sqs_queue.id_sync_queue.arn
function_name = aws_lambda_function.id_sync_lambda.arn

# Optional: Configure batch size and other settings
batch_size = 10
maximum_batching_window_in_seconds = 5

# Optional: Configure error handling
function_response_types = ["ReportBatchItemFailures"]
}
16 changes: 16 additions & 0 deletions infrastructure/instance/temp.tf
Original file line number Diff line number Diff line change
Expand Up @@ -259,6 +259,22 @@ removed {
}
}

# Hand the delta trigger over to infrastructure/event_source_mappings: drop
# the mapping from this stack's state WITHOUT destroying the live resource,
# so the new stack can adopt it.
removed {
  from = aws_lambda_event_source_mapping.delta_trigger

  lifecycle {
    destroy = false
  }
}

# Same handoff for the id-sync SQS trigger.
removed {
  from = aws_lambda_event_source_mapping.id_sync_sqs_trigger

  lifecycle {
    destroy = false
  }
}

removed {
from = aws_ecr_repository.redis_sync_lambda_repository

Expand Down
Loading
Loading