Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
138 changes: 138 additions & 0 deletions .github/workflows/deploy-backend.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# Deploys the backend via Terraform (plan, then apply) and optionally creates
# an MNS subscription in dev. Callable from other workflows (workflow_call)
# or run manually (workflow_dispatch); both paths expose the same inputs so
# the jobs below can read them uniformly via the `inputs` context.
name: Deploy Backend

on:
  workflow_call:
    inputs:
      apigee_environment:
        required: true
        type: string
      create_mns_subscription:
        required: false
        type: boolean
        default: true
      environment:
        required: true
        type: string
      sub_environment:
        required: true
        type: string
  workflow_dispatch:
    inputs:
      apigee_environment:
        type: choice
        # Required here to mirror the workflow_call contract — otherwise a
        # manual run could pass an empty value into the make targets below.
        required: true
        description: Select the Apigee proxy environment
        options:
          - internal-dev
          - int
          - ref
          - prod
      create_mns_subscription:
        description: Create an MNS Subscription. Only available in dev
        required: false
        type: boolean
        default: true
      environment:
        # Was `type: string`: `options` is only honoured on `type: choice`,
        # so the dropdown never appeared and free text was accepted.
        type: choice
        required: true
        description: Select the backend environment
        options:
          - dev
          - preprod
          - prod
      sub_environment:
        type: string
        required: true
        description: Set the sub environment name e.g. pr-xxx, or green/blue in higher environments

jobs:
  terraform-plan:
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    env: # Sonarcloud - do not allow direct usage of untrusted data
      APIGEE_ENVIRONMENT: ${{ inputs.apigee_environment }}
      BACKEND_ENVIRONMENT: ${{ inputs.environment }}
      BACKEND_SUB_ENVIRONMENT: ${{ inputs.sub_environment }}
    permissions:
      # id-token: write is needed for the OIDC handshake used by
      # configure-aws-credentials; contents: read for checkout.
      id-token: write
      contents: read
    steps:
      - name: Connect to AWS
        uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a
        with:
          aws-region: eu-west-2
          role-to-assume: arn:aws:iam::${{ vars.AWS_ACCOUNT_ID }}:role/auto-ops
          role-session-name: github-actions

      - name: Whoami
        run: aws sts get-caller-identity

      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8

      - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd
        with:
          terraform_version: "1.12.2"

      - name: Terraform Init
        working-directory: ${{ vars.TERRAFORM_DIR_PATH }}
        run: make init apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT

      - name: Terraform Plan
        working-directory: ${{ vars.TERRAFORM_DIR_PATH }}
        run: make plan apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT

  terraform-apply:
    needs: terraform-plan
    runs-on: ubuntu-latest
    environment:
      name: ${{ inputs.environment }}
    env: # Sonarcloud - do not allow direct usage of untrusted data
      APIGEE_ENVIRONMENT: ${{ inputs.apigee_environment }}
      BACKEND_ENVIRONMENT: ${{ inputs.environment }}
      BACKEND_SUB_ENVIRONMENT: ${{ inputs.sub_environment }}
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8

      - uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a
        with:
          aws-region: eu-west-2
          role-to-assume: arn:aws:iam::${{ vars.AWS_ACCOUNT_ID }}:role/auto-ops
          role-session-name: github-actions

      - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd
        with:
          terraform_version: "1.12.2"

      # Each job runs on a fresh runner, so init must be repeated even though
      # terraform-plan already ran it.
      - name: Terraform Init
        working-directory: ${{ vars.TERRAFORM_DIR_PATH }}
        run: make init apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT

      - name: Terraform Apply
        working-directory: ${{ vars.TERRAFORM_DIR_PATH }}
        run: |
          make apply apigee_environment=$APIGEE_ENVIRONMENT environment=$BACKEND_ENVIRONMENT sub_environment=$BACKEND_SUB_ENVIRONMENT
          echo "ID_SYNC_QUEUE_ARN=$(make -s output name=id_sync_queue_arn)" >> $GITHUB_ENV

      # Poetry must be installed before setup-python so that `cache: 'poetry'`
      # can locate it when restoring the dependency cache.
      - name: Install poetry
        if: ${{ inputs.environment == 'dev' && inputs.create_mns_subscription }}
        run: pip install poetry==2.1.4

      - uses: actions/setup-python@v5
        if: ${{ inputs.environment == 'dev' && inputs.create_mns_subscription }}
        with:
          # Quoted: an unquoted 3.11 is a YAML float and can be mangled by
          # generic parsers (the 3.10 -> 3.1 trap).
          python-version: "3.11"
          cache: 'poetry'

      - name: Create MNS Subscription
        if: ${{ inputs.environment == 'dev' && inputs.create_mns_subscription }}
        working-directory: './lambdas/mns_subscription'
        env:
          APIGEE_ENVIRONMENT: ${{ inputs.apigee_environment }}
          # Exported to GITHUB_ENV by the Terraform Apply step above.
          SQS_ARN: ${{ env.ID_SYNC_QUEUE_ARN }}
        run: |
          poetry install --no-root
          echo "Subscribing SQS to MNS for notifications..."
          make subscribe
Comment thread
dlzhry2nhs marked this conversation as resolved.
20 changes: 0 additions & 20 deletions .github/workflows/deploy-blue-green.yml

This file was deleted.

162 changes: 0 additions & 162 deletions .github/workflows/deploy-template.yml

This file was deleted.

5 changes: 4 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -41,13 +41,16 @@ build-proxy:
scripts/build_proxy.sh

#Files to loop over in release
_dist_include="pytest.ini poetry.lock poetry.toml pyproject.toml Makefile build/. specification sandbox terraform scripts $(PYTHON_PROJECT_DIRS) $(PYTHON_LAMBDA_DEPENDENCIES)"
# VED-811: remove everything except for proxy related files as we move to Github Actions for backend deployment
_dist_include="pytest.ini poetry.lock poetry.toml pyproject.toml Makefile build/. specification sandbox terraform scripts"


#Create /dist/ sub-directory and copy files into directory
#Ensure full dir structure is preserved for Lambdas
release: clean publish build-proxy
mkdir -p dist
for f in $(_dist_include); do cp -r $$f dist; done
for f in $(PYTHON_PROJECT_DIRS); do cp --parents -r $$f dist; done
cp ecs-proxies-deploy.yml dist/ecs-deploy-sandbox.yml
cp ecs-proxies-deploy.yml dist/ecs-deploy-internal-qa-sandbox.yml
cp ecs-proxies-deploy.yml dist/ecs-deploy-internal-dev-sandbox.yml
Expand Down
2 changes: 1 addition & 1 deletion azure/templates/post-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ steps:
echo "Subscribing SQS to MNS for notifications..."
make subscribe
displayName: "Run MNS Subscription"
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/mns_subscription"
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/lambdas/mns_subscription"
env:
SQS_ARN: "$(ID_SYNC_QUEUE_ARN)"

Expand Down
2 changes: 2 additions & 0 deletions backend/src/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,5 @@ class Urls:

GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE = "Unable to process request. Issue may be transient."
SUPPLIER_PERMISSIONS_HASH_KEY = "supplier_permissions"
# Maximum response size for an AWS Lambda function
MAX_RESPONSE_SIZE_BYTES = 6 * 1024 * 1024
4 changes: 2 additions & 2 deletions backend/src/search_imms_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

from fhir_controller import FhirController, make_controller
from models.errors import Severity, Code, create_operation_outcome
from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE
from constants import GENERIC_SERVER_ERROR_DIAGNOSTICS_MESSAGE, MAX_RESPONSE_SIZE_BYTES
from log_structure import function_info
import base64
import urllib.parse
Expand Down Expand Up @@ -57,7 +57,7 @@ def search_imms(event: events.APIGatewayProxyEventV1, controller: FhirController
result_json = json.dumps(response)
result_size = len(result_json.encode("utf-8"))

if result_size > 6 * 1024 * 1024:
if result_size > MAX_RESPONSE_SIZE_BYTES:
exp_error = create_operation_outcome(
resource_id=str(uuid.uuid4()),
severity=Severity.error,
Expand Down
1 change: 0 additions & 1 deletion backend/tests/sample_data/sample_input_search_imms.json

This file was deleted.

7 changes: 2 additions & 5 deletions backend/tests/test_search_imms.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,15 +127,12 @@ def test_search_immunizations_get_id_from_body_imms_identifer(self):
self.controller.get_immunization_by_identifier.assert_called_once_with(lambda_event)
self.assertDictEqual(exp_res, act_res)

@patch("search_imms_handler.MAX_RESPONSE_SIZE_BYTES", 10)
def test_search_immunizations_lambda_size_limit(self):
"""it should return 400 as search returned too many results."""
lambda_event = {"pathParameters": {"id": "an-id"}, "body": None}
request_file = script_location / "sample_data" / "sample_input_search_imms.json"
with open(request_file) as f:
exp_res = json.load(f)
self.controller.search_immunizations.return_value = json.dumps(exp_res)

self.controller.search_immunizations.return_value = exp_res
self.controller.search_immunizations.return_value = {"response": "size is larger than lambda limit"}

# When
act_res = search_imms(lambda_event, self.controller)
Expand Down
Loading
Loading