2 changes: 1 addition & 1 deletion .github/workflows/automated-pr-validator.yml
@@ -218,7 +218,7 @@ jobs:
id: changed-files
run: |
git remote set-branches origin main && git fetch --depth 1 origin main && git branch main origin/main
echo "CHANGED_FILES=$(git diff main --name-only | grep '.py$' | tr '\n' ' ')" >> $GITHUB_OUTPUT
echo "CHANGED_FILES=$(git diff main --name-status | grep -E '^[^D].*\.py$' | cut -f2 | tr '\n' ' ')" >> $GITHUB_OUTPUT

- name: Run black
id: black
12 changes: 12 additions & 0 deletions .github/workflows/base-lambda-layer-reusable-publish-all.yml
@@ -87,3 +87,15 @@ jobs:
lambda_layer_name: alerting_lambda_layer
secrets:
AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }}

deploy_files_lambda_layer:
name: Deploy files_lambda_layer
uses: ./.github/workflows/base-lambda-layer-reusable-publish.yml
with:
environment: ${{ inputs.environment}}
python_version: ${{ inputs.python_version }}
build_branch: ${{ inputs.build_branch }}
sandbox: ${{ inputs.sandbox }}
lambda_layer_name: files_lambda_layer
secrets:
AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }}
2 changes: 1 addition & 1 deletion .github/workflows/base-lambdas-reusable-deploy-all.yml
@@ -695,7 +695,7 @@ jobs:
sandbox: ${{ inputs.sandbox }}
lambda_handler_name: document_reference_virus_scan_handler
lambda_aws_name: DocumentReferenceVirusScanCheck
lambda_layer_names: "core_lambda_layer"
lambda_layer_names: "core_lambda_layer,files_lambda_layer"
secrets:
AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }}

2 changes: 1 addition & 1 deletion .lintstagedrc
@@ -7,8 +7,8 @@
"./app/node_modules/prettier/bin/prettier.cjs --write"
],
"*.py": [
"./lambdas/venv/bin/ruff check --fix",
"./lambdas/venv/bin/python3 -m black",
"./lambdas/venv/bin/ruff check ./lambdas",
"./lambdas/venv/bin/python3 -m isort --profile black",
]
}
8 changes: 6 additions & 2 deletions Makefile
@@ -7,6 +7,7 @@ GITHUB_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_github_runner.txt
TEST_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_test.txt
CORE_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_core_lambda_layer.txt
DATA_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_data_lambda_layer.txt
FILES_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_files_lambda_layer.txt
REPORTS_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_reports_lambda_layer.txt
ALERTING_REQUIREMENTS=$(LAMBDA_LAYER_REQUIREMENTS_PATH)/requirements_alerting_lambda_layer.txt
EDGE_REQUIREMENTS=$(REQUIREMENTS_PATH)/requirements_edge_lambda.txt
@@ -86,18 +87,19 @@ format:
@if [ $(FORMAT_ALL) = true ]; then \
CHANGED_FILES=''; \
else \
CHANGED_FILES=$$(git diff main --name-only | grep '.py$$' | xargs); \
CHANGED_FILES=$$(git diff main --name-status | grep -E '^[^D].*\.py$$' | cut -f2 | xargs); \
echo $$CHANGED_FILES; \
if [ -z "$$CHANGED_FILES" ]; then echo "No changed files to format"; exit 0; fi; \
fi; \
$(VENV_PATH_PREFIX)/bin/python3 -m black $$CHANGED_FILES; \
$(VENV_PATH_PREFIX)/bin/ruff check $$CHANGED_FILES --fix; \
$(VENV_PATH_PREFIX)/bin/python3 -m black $$CHANGED_FILES; \
$(VENV_PATH_PREFIX)/bin/python3 -m isort --profile black $$CHANGED_FILES

sort-requirements:
sort -o $(TEST_REQUIREMENTS) $(TEST_REQUIREMENTS)
sort -o $(CORE_REQUIREMENTS) $(CORE_REQUIREMENTS)
sort -o $(DATA_REQUIREMENTS) $(DATA_REQUIREMENTS)
sort -o $(FILES_REQUIREMENTS) $(FILES_REQUIREMENTS)
sort -o $(REPORTS_REQUIREMENTS) $(REPORTS_REQUIREMENTS)
sort -o $(ALERTING_REQUIREMENTS) $(ALERTING_REQUIREMENTS)

@@ -106,6 +108,7 @@ check-packages:
./lambdas/venv/bin/pip-audit -r $(TEST_REQUIREMENTS)
./lambdas/venv/bin/pip-audit -r $(CORE_REQUIREMENTS)
./lambdas/venv/bin/pip-audit -r $(DATA_REQUIREMENTS)
./lambdas/venv/bin/pip-audit -r $(FILES_REQUIREMENTS)
./lambdas/venv/bin/pip-audit -r $(REPORTS_REQUIREMENTS)
./lambdas/venv/bin/pip-audit -r $(ALERTING_REQUIREMENTS)

@@ -206,6 +209,7 @@ env:
@./lambdas/venv/bin/pip3 install -r $(TEST_REQUIREMENTS) --no-cache-dir
@./lambdas/venv/bin/pip3 install -r $(CORE_REQUIREMENTS) --no-cache-dir
@./lambdas/venv/bin/pip3 install -r $(DATA_REQUIREMENTS) --no-cache-dir
@./lambdas/venv/bin/pip3 install -r $(FILES_REQUIREMENTS) --no-cache-dir
@./lambdas/venv/bin/pip3 install -r $(REPORTS_REQUIREMENTS) --no-cache-dir
@./lambdas/venv/bin/pip3 install -r $(ALERTING_REQUIREMENTS) --no-cache-dir
@echo " "
1 change: 1 addition & 0 deletions lambdas/enums/document_status.py
@@ -6,6 +6,7 @@ class DocumentStatus(Enum):
FORBIDDEN = ("forbidden", "UC_4003")
NOT_FOUND = ("not-found", "UC_4004")
INFECTED = ("infected", "UC_4005")
INVALID = ("invalid", "UC_4006")

@property
def code(self):
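For readers unfamiliar with the pattern, here is a minimal sketch of how a value-tuple enum like DocumentStatus typically resolves its display string and error code. Only members visible in this hunk are included, and the property bodies are assumptions inferred from how .display and .code are used elsewhere in the diff, not code copied from the repository.

# Sketch only: property bodies are assumed from usage, not taken from
# lambdas/enums/document_status.py.
from enum import Enum


class DocumentStatus(Enum):
    FORBIDDEN = ("forbidden", "UC_4003")
    NOT_FOUND = ("not-found", "UC_4004")
    INFECTED = ("infected", "UC_4005")
    INVALID = ("invalid", "UC_4006")

    @property
    def display(self) -> str:
        # Human-readable status string, e.g. "invalid"
        return self.value[0]

    @property
    def code(self) -> str:
        # Matching error code, e.g. "UC_4006"
        return self.value[1]
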
12 changes: 10 additions & 2 deletions lambdas/enums/lambda_error.py
@@ -43,7 +43,9 @@ def create_error_response(
return error_response

def to_str(
self, params: Optional[dict] = None, details: Optional[str] = None
self,
params: Optional[dict] = None,
details: Optional[str] = None,
) -> str:
message = self.value["message"]
if "%" in message and params:
@@ -59,7 +61,9 @@ def create_error_body(
**kwargs,
) -> str:
return self.create_error_response(
params=params, details=details, **kwargs
params=params,
details=details,
**kwargs,
).create()

"""
@@ -440,6 +444,10 @@ def create_error_body(
"err_code": "UC_4005",
"message": "Some of the given document references are not referring to clean files",
}
UploadConfirmResultFilesInvalid = {
"err_code": "UC_4006",
"message": "Some of the given document references are password protected or corrupted",
}
UploadConfirmResultAWSFailure = {
"err_code": "UC_5004",
"message": "Error occurred with an AWS service",
1 change: 1 addition & 0 deletions lambdas/enums/virus_scan_result.py
@@ -7,6 +7,7 @@ class VirusScanResult(StrEnum):
INFECTED_ALLOWED = "InfectedAllowed"
UNSCANNABLE = "Unscannable"
ERROR = "Error"
INVALID = "Invalid"


SCAN_RESULT_TAG_KEY = "scan-result"
@@ -0,0 +1 @@
msoffcrypto-tool==6.0.0
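
The new one-line requirements file (its name is not visible in this rendering, though the Makefile's new FILES_REQUIREMENTS variable points at requirements_files_lambda_layer.txt) pins msoffcrypto-tool, which presumably supports the new "password protected or corrupted" (Invalid / UC_4006) outcome added elsewhere in this PR. Below is a minimal sketch of how the library can flag an encrypted Office document; it is illustrative only and not the PR's actual scanning code, which is not part of this diff.

# Illustrative sketch, not the repository's implementation: use msoffcrypto-tool
# to detect password-protected Office documents.
import msoffcrypto


def is_password_protected(path: str) -> bool:
    with open(path, "rb") as f:
        try:
            office_file = msoffcrypto.OfficeFile(f)
            return office_file.is_encrypted()
        except Exception:
            # Files the library cannot parse could be treated as corrupted/invalid;
            # the broad handler is a simplification for the sketch.
            return True
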
2 changes: 1 addition & 1 deletion lambdas/ruff.toml
@@ -33,7 +33,7 @@ line-length = 130
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
# COM812: Enforce trailing commas on multi-line constructs.
select = ["E", "F", "COM812"]
select = ["E", "F", "COM812", "RET505"]
ignore = []

# Allow autofix for all enabled rules (when `--fix`) is provided.
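The select list now also enables RET505, flake8-return's "superfluous else after return" rule, which is consistent with the branch cleanups elsewhere in this PR. An illustrative example of the pattern the rule flags and its preferred rewrite (not code from this repository):

# Before: RET505 flags the unnecessary else after a return.
def scan_label(is_clean: bool) -> str:
    if is_clean:
        return "Clean"
    else:
        return "Infected"


# After: the else is dropped and the fallthrough return stands on its own.
def scan_label_fixed(is_clean: bool) -> str:
    if is_clean:
        return "Clean"
    return "Infected"
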
57 changes: 39 additions & 18 deletions lambdas/services/base/s3_service.py
@@ -40,19 +40,23 @@ def __init__(self, custom_aws_role=None):
if custom_aws_role:
self.iam_service = IAMService()
self.custom_client, self.expiration_time = self.iam_service.assume_role(
self.custom_aws_role, "s3", config=self.config
self.custom_aws_role,
"s3",
config=self.config,
)

# S3 Location should be a minimum of a s3_object_key but can also be a directory location in the form of
# {{directory}}/{{s3_object_key}}
def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: str):
if self.custom_client:
if datetime.now(timezone.utc) > self.expiration_time - timedelta(
minutes=10
minutes=10,
):
logger.info(S3Service.EXPIRED_SESSION_WARNING)
self.custom_client, self.expiration_time = self.iam_service.assume_role(
self.custom_aws_role, "s3", config=self.config
self.custom_aws_role,
"s3",
config=self.config,
)
return self.custom_client.generate_presigned_post(
s3_bucket_name,
@@ -65,11 +69,13 @@ def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: str):
def create_put_presigned_url(self, s3_bucket_name: str, file_key: str):
if self.custom_client:
if datetime.now(timezone.utc) > self.expiration_time - timedelta(
minutes=10
minutes=10,
):
logger.info(S3Service.EXPIRED_SESSION_WARNING)
self.custom_client, self.expiration_time = self.iam_service.assume_role(
self.custom_aws_role, "s3", config=self.config
self.custom_aws_role,
"s3",
config=self.config,
)
logger.info("Generating presigned URL")
return self.custom_client.generate_presigned_url(
@@ -82,11 +88,13 @@ def create_put_presigned_url(self, s3_bucket_name: str, file_key: str):
def create_download_presigned_url(self, s3_bucket_name: str, file_key: str):
if self.custom_client:
if datetime.now(timezone.utc) > self.expiration_time - timedelta(
minutes=10
minutes=10,
):
logger.info(S3Service.EXPIRED_SESSION_WARNING)
self.custom_client, self.expiration_time = self.iam_service.assume_role(
self.custom_aws_role, "s3", config=self.config
self.custom_aws_role,
"s3",
config=self.config,
)
logger.info("Generating presigned URL")
return self.custom_client.generate_presigned_url(
@@ -143,32 +151,40 @@ def copy_across_bucket(
if_none_match,
False,
)
else:
raise e
raise e
else:
logger.error(f"Copy failed: {e}")
raise e

def delete_object(
self, s3_bucket_name: str, file_key: str, version_id: str | None = None
self,
s3_bucket_name: str,
file_key: str,
version_id: str | None = None,
):
if version_id is None:
return self.client.delete_object(Bucket=s3_bucket_name, Key=file_key)

return self.client.delete_object(
Bucket=s3_bucket_name, Key=file_key, VersionId=version_id
Bucket=s3_bucket_name,
Key=file_key,
VersionId=version_id,
)

def create_object_tag(
self, s3_bucket_name: str, file_key: str, tag_key: str, tag_value: str
self,
s3_bucket_name: str,
file_key: str,
tag_key: str,
tag_value: str,
):
return self.client.put_object_tagging(
Bucket=s3_bucket_name,
Key=file_key,
Tagging={
"TagSet": [
{"Key": tag_key, "Value": tag_value},
]
],
},
)

@@ -182,7 +198,7 @@ def get_tag_value(self, s3_bucket_name: str, file_key: str) -> str:
return key_value_pair["Value"]

raise TagNotFoundException(
f"Object {file_key} doesn't have a tag of key {tag_key}"
f"Object {file_key} doesn't have a tag of key {tag_key}",
)

def file_exist_on_s3(self, s3_bucket_name: str, file_key: str) -> bool:
@@ -218,8 +234,11 @@ def get_file_size(self, s3_bucket_name: str, object_key: str) -> int:
def get_head_object(self, bucket: str, key: str):
return self.client.head_object(Bucket=bucket, Key=key)

def get_object_stream(self, bucket: str, key: str):
response = self.client.get_object(Bucket=bucket, Key=key)
def get_object_stream(self, bucket: str, key: str, byte_range: str | None = None):
params = {"Bucket": bucket, "Key": key}
if byte_range:
params["Range"] = byte_range
response = self.client.get_object(**params)
return response.get("Body")

def stream_s3_object_to_memory(self, bucket: str, key: str) -> BytesIO:
@@ -247,11 +266,13 @@ def upload_file_obj(
logger.info(f"Uploaded file object to s3://{s3_bucket_name}/{file_key}")
except ClientError as e:
logger.error(
f"Failed to upload file object to s3://{s3_bucket_name}/{file_key} - {e}"
f"Failed to upload file object to s3://{s3_bucket_name}/{file_key} - {e}",
)
raise e

def save_or_create_file(self, source_bucket: str, file_key: str, body: bytes):
return self.client.put_object(
Bucket=source_bucket, Key=file_key, Body=BytesIO(body)
Bucket=source_bucket,
Key=file_key,
Body=BytesIO(body),
)
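
get_object_stream now accepts an optional byte_range, which is forwarded to S3 as the Range parameter, so callers can read part of an object instead of the whole body. Below is a usage sketch with boto3 showing the same Range format; the bucket and key names are placeholders.

# Usage sketch with placeholder bucket/key: fetch only the first 8 bytes of an
# object via an HTTP Range header, e.g. to inspect a file signature cheaply.
import boto3

s3_client = boto3.client("s3")
response = s3_client.get_object(
    Bucket="example-bucket",
    Key="path/to/document.pdf",
    Range="bytes=0-7",  # same format a byte_range argument would forward to S3
)
first_bytes = response["Body"].read()  # e.g. b"%PDF-1.7" for a PDF
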
9 changes: 7 additions & 2 deletions lambdas/services/get_document_upload_status.py
@@ -24,12 +24,16 @@ def _determine_document_status(self, doc_ref, nhs_number):
if doc_ref.doc_status == "cancelled":
if doc_ref.virus_scanner_result == VirusScanResult.INFECTED:
return DocumentStatus.INFECTED.display, DocumentStatus.INFECTED.code
if doc_ref.virus_scanner_result == VirusScanResult.INVALID:
return DocumentStatus.INVALID.display, DocumentStatus.INVALID.code
return DocumentStatus.CANCELLED.display, DocumentStatus.CANCELLED.code

return doc_ref.doc_status, None

def get_document_references_by_id(
self, nhs_number: str, document_ids: list[str]
self,
nhs_number: str,
document_ids: list[str],
) -> dict:
"""
Checks the status of a list of documents for a given patient.
@@ -42,7 +46,8 @@ def get_document_references_by_id(
A dictionary with a list of document IDs and their corresponding statuses.
"""
found_docs = self.document_service.get_batch_document_references_by_id(
document_ids, SupportedDocumentTypes.LG
document_ids,
SupportedDocumentTypes.LG,
)
found_docs_by_id = {doc.id: doc for doc in found_docs}
results = {}
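Taken together with the enum and error additions above, a cancelled document reference now resolves to one of three outcomes, checked in order. A condensed, illustrative view of that branch order follows; the import paths and standalone function are assumptions made for the sketch, and the real logic is the _determine_document_status method shown in the diff.

# Illustrative only: condensed view of the cancelled-document branch order.
# Import paths are assumed for the sketch.
from enums.document_status import DocumentStatus
from enums.virus_scan_result import VirusScanResult


def status_for_cancelled(virus_scanner_result: str) -> tuple[str, str]:
    if virus_scanner_result == VirusScanResult.INFECTED:
        return DocumentStatus.INFECTED.display, DocumentStatus.INFECTED.code  # UC_4005
    if virus_scanner_result == VirusScanResult.INVALID:
        return DocumentStatus.INVALID.display, DocumentStatus.INVALID.code  # UC_4006
    return DocumentStatus.CANCELLED.display, DocumentStatus.CANCELLED.code
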
34 changes: 0 additions & 34 deletions lambdas/services/pdf_stitch_service.py

This file was deleted.
