Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions infrastructure/account/.terraform.lock.hcl

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 4 additions & 2 deletions infrastructure/account/cloudwatch_dashboards.tf
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ locals {
# ECS (cluster names match instance short_prefix: imms-<sub_env>-ecs-cluster)
ecs_clusters = [for sub_env in local.sub_environments_map[var.environment] : "imms-${sub_env}-ecs-cluster"]

redis_cache_cluster_id = "immunisation-redis-replication-group-001"

# Alarms
alarms = [
"_create_imms-lambda-error",
Expand Down Expand Up @@ -745,7 +747,7 @@ resource "aws_cloudwatch_dashboard" "imms-metrics-dashboard" {
"view" : "timeSeries",
"stacked" : false,
"metrics" : [
["AWS/ElastiCache", "CacheHits", "CacheClusterId", "immunisation-redis-cluster", "CacheNodeId", "0001"]
["AWS/ElastiCache", "CacheHits", "CacheClusterId", local.redis_cache_cluster_id, "CacheNodeId", "0001"]
],
"region" : var.aws_region,
"title" : "ElastiCache - CacheHits",
Expand All @@ -760,7 +762,7 @@ resource "aws_cloudwatch_dashboard" "imms-metrics-dashboard" {
"height" : 6,
"properties" : {
"metrics" : [
["AWS/ElastiCache", "CPUUtilization", "CacheClusterId", "immunisation-redis-cluster", "CacheNodeId", "0001"]
["AWS/ElastiCache", "CPUUtilization", "CacheClusterId", local.redis_cache_cluster_id, "CacheNodeId", "0001"]
],
"view" : "timeSeries",
"stacked" : false,
Expand Down
51 changes: 47 additions & 4 deletions infrastructure/account/redis_cache.tf
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
# Subnet Group for Redis — places the cache nodes in the account's private
# subnets so the cache is reachable only from workloads inside the VPC
# (Lambdas / ECS tasks), never from the public internet.
resource "aws_elasticache_subnet_group" "redis_subnet_group" {
name = "immunisation-redis-subnet-group"
subnet_ids = values(aws_subnet.private)[*].id
}

resource "aws_elasticache_cluster" "redis_cluster" {
cluster_id = "immunisation-redis-cluster"
engine = "redis"
engine_version = "7.0"
node_type = "cache.t2.micro"
num_cache_nodes = 1
parameter_group_name = "default.redis7"
Expand All @@ -9,8 +16,44 @@ resource "aws_elasticache_cluster" "redis_cluster" {
subnet_group_name = aws_elasticache_subnet_group.redis_subnet_group.name
}

# Subnet Group for Redis
resource "aws_elasticache_subnet_group" "redis_subnet_group" {
name = "immunisation-redis-subnet-group"
subnet_ids = values(aws_subnet.private)[*].id
# CloudFormation dynamic references keep the generated auth token out of
# Terraform state: "{{resolve:secretsmanager:...}}" is resolved by
# CloudFormation at deploy time, so the plaintext token never appears in
# Terraform plan output or the state file.
resource "aws_cloudformation_stack" "redis_replication_group" {
  name = "immunisation-redis-replication-group"

  template_body = jsonencode({
    AWSTemplateFormatVersion = "2010-09-09"
    Description              = "Redis replication group with Secrets Manager generated auth token"
    Resources = {
      # Secret holding the Redis AUTH token. 32 alphanumeric characters
      # satisfies the ElastiCache AuthToken constraints (16-128 printable
      # ASCII characters, excluding '@', '"' and '/').
      RedisAuthToken = {
        Type = "AWS::SecretsManager::Secret"
        Properties = {
          Name        = "imms/redis/auth-token"
          Description = "Auth token for the immunisation Redis cache"
          GenerateSecretString = {
            ExcludePunctuation = true
            PasswordLength     = 32
          }
        }
      }
      RedisReplicationGroup = {
        Type = "AWS::ElastiCache::ReplicationGroup"
        # A dynamic reference does not create an implicit dependency, so the
        # secret must be explicitly ordered before the replication group.
        DependsOn = "RedisAuthToken"
        Properties = {
          ReplicationGroupId          = "immunisation-redis-replication-group"
          ReplicationGroupDescription = "Redis cache for immunisation configuration data"
          Engine                      = "redis"
          EngineVersion               = "7.0"
          # t1/t2 node families do not support ElastiCache at-rest
          # encryption, so cache.t2.micro would fail stack creation with
          # AtRestEncryptionEnabled = true. cache.t3.micro is the smallest
          # burstable type supporting both encryption modes.
          CacheNodeType           = "cache.t3.micro"
          NumCacheClusters        = 1
          CacheParameterGroupName = "default.redis7"
          Port                    = 6379
          SecurityGroupIds        = [aws_security_group.lambda_redis_sg.id]
          CacheSubnetGroupName    = aws_elasticache_subnet_group.redis_subnet_group.name
          AtRestEncryptionEnabled = true
          # AuthToken may only be set when TransitEncryptionEnabled is true.
          TransitEncryptionEnabled = true
          AuthToken                = "{{resolve:secretsmanager:imms/redis/auth-token:SecretString}}"
        }
      }
    }
  })
}
68 changes: 34 additions & 34 deletions infrastructure/instance/ecs_batch_processor_config.tf
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,11 @@ resource "aws_iam_policy" "ecs_task_exec_policy" {
"firehose:PutRecordBatch"
],
"Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
}
]
})
Expand Down Expand Up @@ -147,40 +152,35 @@ resource "aws_ecs_task_definition" "ecs_task" {
name = "${local.short_prefix}-process-records-container"
image = var.recordprocessor_image_uri
essential = true
environment = [
{
name = "SOURCE_BUCKET_NAME"
value = aws_s3_bucket.batch_data_source_bucket.bucket
},
{
name = "ACK_BUCKET_NAME"
value = aws_s3_bucket.batch_data_destination_bucket.bucket
},
{
name = "KINESIS_STREAM_ARN"
value = local.kinesis_arn
},
{
name = "KINESIS_STREAM_NAME"
value = "${local.short_prefix}-processingdata-stream"
},
{
name = "SPLUNK_FIREHOSE_NAME"
value = module.splunk.firehose_stream_name
},
{
name = "AUDIT_TABLE_NAME"
value = aws_dynamodb_table.audit-table.name
},
{
name = "REDIS_HOST"
value = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
},
{
name = "REDIS_PORT"
value = tostring(data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port)
}
]
environment = concat(
[
{
name = "SOURCE_BUCKET_NAME"
value = aws_s3_bucket.batch_data_source_bucket.bucket
},
{
name = "ACK_BUCKET_NAME"
value = aws_s3_bucket.batch_data_destination_bucket.bucket
},
{
name = "KINESIS_STREAM_ARN"
value = local.kinesis_arn
},
{
name = "KINESIS_STREAM_NAME"
value = "${local.short_prefix}-processingdata-stream"
},
{
name = "SPLUNK_FIREHOSE_NAME"
value = module.splunk.firehose_stream_name
},
{
name = "AUDIT_TABLE_NAME"
value = aws_dynamodb_table.audit-table.name
}
],
local.redis_environment
)
logConfiguration = {
logDriver = "awslogs"
options = {
Expand Down
31 changes: 21 additions & 10 deletions infrastructure/instance/endpoints.tf
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,28 @@ locals {
"get_imms", "create_imms", "update_imms", "search_imms", "delete_imms", "not_found"
]
imms_table_name = aws_dynamodb_table.events-dynamodb-table.name
imms_lambda_env_vars = {
"DYNAMODB_TABLE_NAME" = local.imms_table_name,
"IMMUNIZATION_ENV" = local.resource_scope,
"IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4"
# except for prod and ref, any other env uses PDS int environment
"PDS_ENV" = var.pds_environment
"SPLUNK_FIREHOSE_NAME" = module.splunk.firehose_stream_name
"SQS_QUEUE_URL" = "https://sqs.${var.aws_region}.amazonaws.com/${var.immunisation_account_id}/${local.short_prefix}-ack-metadata-queue.fifo"
"REDIS_HOST" = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
"REDIS_PORT" = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
imms_lambda_env_vars = merge(
{
"DYNAMODB_TABLE_NAME" = local.imms_table_name,
"IMMUNIZATION_ENV" = local.resource_scope,
"IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4"
# except for prod and ref, any other env uses PDS int environment
"PDS_ENV" = var.pds_environment
"SPLUNK_FIREHOSE_NAME" = module.splunk.firehose_stream_name
"SQS_QUEUE_URL" = "https://sqs.${var.aws_region}.amazonaws.com/${var.immunisation_account_id}/${local.short_prefix}-ack-metadata-queue.fifo"
},
local.redis_env_vars
)
}

# Grants read access to the Redis AUTH token secret so the imms Lambdas can
# fetch the token at runtime and authenticate against the TLS-enabled
# replication group. Merged into the combined imms policy document below.
data "aws_iam_policy_document" "redis_auth_token_policy_document" {
statement {
effect = "Allow"
actions = ["secretsmanager:GetSecretValue"]
resources = [data.aws_secretsmanager_secret.redis_auth_token.arn]
}
}

data "aws_iam_policy_document" "imms_policy_document" {
source_policy_documents = [
templatefile("${local.policy_path}/dynamodb.json", {
Expand All @@ -54,6 +64,7 @@ data "aws_iam_policy_document" "imms_policy_document" {
templatefile("${local.policy_path}/secret_manager.json", {
"account_id" : data.aws_caller_identity.current.account_id
}),
data.aws_iam_policy_document.redis_auth_token_policy_document.json,
file("${local.policy_path}/ec2_network_interfaces.json")
]
}
Expand Down
32 changes: 19 additions & 13 deletions infrastructure/instance/file_name_processor.tf
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,11 @@ resource "aws_iam_policy" "filenameprocessor_lambda_exec_policy" {
],
"Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
"Effect" : "Allow",
"Action" : [
Expand Down Expand Up @@ -240,19 +245,20 @@ resource "aws_lambda_function" "file_processor_lambda" {
}

environment {
variables = {
ACCOUNT_ID = var.immunisation_account_id
DPS_ACCOUNT_ID = var.dspp_core_account_id
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DPS_BUCKET_NAME = var.dspp_submission_s3_bucket_name
QUEUE_URL = aws_sqs_queue.batch_file_created.url
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
AUDIT_TABLE_NAME = aws_dynamodb_table.audit-table.name
AUDIT_TABLE_TTL_DAYS = 60
}
variables = merge(
{
ACCOUNT_ID = var.immunisation_account_id
DPS_ACCOUNT_ID = var.dspp_core_account_id
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DPS_BUCKET_NAME = var.dspp_submission_s3_bucket_name
QUEUE_URL = aws_sqs_queue.batch_file_created.url
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
AUDIT_TABLE_NAME = aws_dynamodb_table.audit-table.name
AUDIT_TABLE_TTL_DAYS = 60
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn
reserved_concurrent_executions = local.is_temp ? -1 : 20
Expand Down
22 changes: 14 additions & 8 deletions infrastructure/instance/forwarder_lambda.tf
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,11 @@ resource "aws_iam_policy" "forwarding_lambda_exec_policy" {
]
Resource = aws_sqs_queue.fifo_queue.arn
},
{
Effect = "Allow"
Action = "secretsmanager:GetSecretValue"
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
Effect = "Allow",
Action = [
Expand Down Expand Up @@ -146,14 +151,15 @@ resource "aws_lambda_function" "forwarding_lambda" {
}

environment {
variables = {
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DYNAMODB_TABLE_NAME = aws_dynamodb_table.events-dynamodb-table.name
SQS_QUEUE_URL = aws_sqs_queue.fifo_queue.url
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
}
variables = merge(
{
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DYNAMODB_TABLE_NAME = aws_dynamodb_table.events-dynamodb-table.name
SQS_QUEUE_URL = aws_sqs_queue.fifo_queue.url
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn
depends_on = [
Expand Down
24 changes: 22 additions & 2 deletions infrastructure/instance/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,28 @@ data "aws_kms_key" "existing_dynamo_encryption_key" {
key_id = "alias/imms-event-dynamodb-encryption"
}

data "aws_elasticache_cluster" "existing_redis" {
cluster_id = "immunisation-redis-cluster"
# Account-level Redis replication group (provisioned in the account stack);
# looked up by id so instance resources can read its primary endpoint and port.
data "aws_elasticache_replication_group" "existing_redis" {
replication_group_id = "immunisation-redis-replication-group"
}

# Secrets Manager secret holding the Redis AUTH token (generated alongside
# the replication group in the account stack). Only the ARN and name are
# referenced here; the secret value itself never enters Terraform state.
data "aws_secretsmanager_secret" "redis_auth_token" {
name = "imms/redis/auth-token"
}

locals {
# Shared Redis connection settings, exposed in the two shapes consumers
# need: a plain map for Lambda `environment.variables` blocks, and a
# [{ name, value }] object list for ECS container definitions.
redis_env_vars = {
REDIS_HOST = data.aws_elasticache_replication_group.existing_redis.primary_endpoint_address
# Environment variable values must be strings, hence tostring() on the port.
REDIS_PORT = tostring(data.aws_elasticache_replication_group.existing_redis.port)
# Clients must connect over TLS and resolve the AUTH token from Secrets
# Manager by name at runtime; the token value is never passed directly.
REDIS_SSL = "true"
REDIS_AUTH_TOKEN_SECRET_NAME = data.aws_secretsmanager_secret.redis_auth_token.name
}

# The same settings reshaped for ECS container definition `environment` lists.
redis_environment = [
for name, value in local.redis_env_vars : {
name = name
value = value
}
]
}

data "aws_security_group" "existing_securitygroup" {
Expand Down
18 changes: 12 additions & 6 deletions infrastructure/instance/redis_sync_lambda.tf
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,11 @@ resource "aws_iam_policy" "redis_sync_lambda_exec_policy" {
],
Resource : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
Effect = "Allow"
Action = "lambda:InvokeFunction"
Expand Down Expand Up @@ -155,12 +160,13 @@ resource "aws_lambda_function" "redis_sync_lambda" {
}

environment {
variables = {
CONFIG_BUCKET_NAME = local.config_bucket_name
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
}
variables = merge(
{
CONFIG_BUCKET_NAME = local.config_bucket_name
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn

Expand Down
Loading
Loading