2 changes: 2 additions & 0 deletions infrastructure/terraform/components/api/README.md
@@ -12,6 +12,8 @@ No requirements.
| <a name="input_aws_account_id"></a> [aws\_account\_id](#input\_aws\_account\_id) | The AWS Account ID (numeric) | `string` | n/a | yes |
| <a name="input_ca_pem_filename"></a> [ca\_pem\_filename](#input\_ca\_pem\_filename) | Filename for the CA truststore file within the s3 bucket | `string` | `null` | no |
| <a name="input_component"></a> [component](#input\_component) | The variable encapsulating the name of this component | `string` | `"supapi"` | no |
| <a name="input_core_account_id"></a> [core\_account\_id](#input\_core\_account\_id) | AWS Account ID for Core | `string` | `"000000000000"` | no |
| <a name="input_core_environment"></a> [core\_environment](#input\_core\_environment) | Environment of Core | `string` | `"prod"` | no |
| <a name="input_default_tags"></a> [default\_tags](#input\_default\_tags) | A map of default tags to apply to all taggable resources within the component | `map(string)` | `{}` | no |
| <a name="input_enable_backups"></a> [enable\_backups](#input\_enable\_backups) | Enable backups | `bool` | `false` | no |
| <a name="input_environment"></a> [environment](#input\_environment) | The name of the tfscaffold environment | `string` | n/a | yes |

@@ -36,7 +36,8 @@ module "letter_updates_transformer" {
log_subscription_role_arn = local.acct.log_subscription_role_arn

lambda_env_vars = merge(local.common_lambda_env_vars, {
EVENTPUB_SNS_TOPIC_ARN = "${module.eventpub.sns_topic.arn}"
EVENTPUB_SNS_TOPIC_ARN = "${module.eventpub.sns_topic.arn}",
EVENT_SOURCE = "/data-plane/supplier-api/${var.group}/${var.environment}/letters"
})
}

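
The module now derives the Lambda's CloudEvents source from Terraform variables instead of leaving it hard-coded in the mapper. A minimal TypeScript sketch of the interpolation above; the group and environment values shown are taken from the test fixture later in this diff and are illustrative only:

// Sketch only: mirrors the Terraform template
// "/data-plane/supplier-api/${var.group}/${var.environment}/letters".
function eventSource(group: string, environment: string): string {
  return `/data-plane/supplier-api/${group}/${environment}/letters`;
}

// Values below match the dev fixture used in the handler tests further down.
console.log(eventSource("nhs-supplier-api-dev", "main"));
// -> "/data-plane/supplier-api/nhs-supplier-api-dev/main/letters"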

2 changes: 1 addition & 1 deletion internal/events/package.json
@@ -50,5 +50,5 @@
"typecheck": "tsc --noEmit"
},
"types": "dist/index.d.ts",
"version": "1.0.5"
"version": "1.0.6"
}
2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.ACCEPTED.json
@@ -17,7 +17,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",

2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.FORWARDED.json
@@ -19,7 +19,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",

2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.RETURNED.json
@@ -19,7 +19,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",

@@ -26,15 +26,18 @@ jest.mock("crypto", () => ({
randomBytes: (size: number) => randomBytes[String(size)],
}));

describe("letter-updates-transformer Lambda", () => {
const mockedDeps: jest.Mocked<Deps> = {
snsClient: { send: jest.fn() } as unknown as SNSClient,
logger: { info: jest.fn(), error: jest.fn() } as unknown as pino.Logger,
env: {
EVENTPUB_SNS_TOPIC_ARN: "arn:aws:sns:region:account:topic",
} as unknown as EnvVars,
} as Deps;
const eventSource =
"/data-plane/supplier-api/nhs-supplier-api-dev/main/letters";
const mockedDeps: jest.Mocked<Deps> = {
snsClient: { send: jest.fn() } as unknown as SNSClient,
logger: { info: jest.fn(), error: jest.fn() } as unknown as pino.Logger,
env: {
EVENTPUB_SNS_TOPIC_ARN: "arn:aws:sns:region:account:topic",
EVENT_SOURCE: eventSource,
} as unknown as EnvVars,
} as Deps;

describe("letter-updates-transformer Lambda", () => {
beforeEach(() => {
jest.useFakeTimers();
});
@@ -50,7 +53,9 @@ describe("letter-updates-transformer Lambda", () => {
const newLetter = generateLetter("PRINTED");
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -76,7 +81,9 @@ describe("letter-updates-transformer Lambda", () => {
newLetter.reasonCode = "R1";
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -103,7 +110,9 @@ describe("letter-updates-transformer Lambda", () => {
newLetter.reasonCode = "R2";
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -135,14 +144,28 @@ describe("letter-updates-transformer Lambda", () => {
expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});

it("does not publish non-modify events", async () => {
it("publishes INSERT events", async () => {
const handler = createHandler(mockedDeps);
const newLetter = generateLetter("ACCEPTED");
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

const testData = generateKinesisEvent([generateInsertRecord(newLetter)]);
await handler(testData, mockDeep<Context>(), jest.fn());

expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
expect(mockedDeps.snsClient.send).toHaveBeenCalledWith(
expect.objectContaining({
input: expect.objectContaining({
TopicArn: "arn:aws:sns:region:account:topic",
PublishBatchRequestEntries: expectedEntries,
}),
}),
);
});

it("does not publish invalid letter data", async () => {
@@ -159,6 +182,53 @@

expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});

it("throws error when kinesis data contains malformed JSON", async () => {
const handler = createHandler(mockedDeps);

// Create a Kinesis event with malformed JSON data
const malformedKinesisEvent: KinesisStreamEvent = {
Records: [{
kinesis: {
data: Buffer.from("invalid-json-data").toString("base64"),
sequenceNumber: "12345"
}
} as any]
};

await expect(
handler(malformedKinesisEvent, mockDeep<Context>(), jest.fn())
).rejects.toThrow();

expect(mockedDeps.logger.error).toHaveBeenCalledWith(
expect.objectContaining({
description: "Error extracting payload",
error: expect.any(Error),
record: expect.objectContaining({
kinesis: expect.objectContaining({
data: Buffer.from("invalid-json-data").toString("base64")
})
})
})
);
});

it("handles events with no records", async () => {
const handler = createHandler(mockedDeps);

// Create a Kinesis event with empty Records array
const emptyKinesisEvent: KinesisStreamEvent = { Records: [] };

await handler(emptyKinesisEvent, mockDeep<Context>(), jest.fn());

expect(mockedDeps.logger.info).toHaveBeenCalledWith(
expect.objectContaining({
description: "Number of records",
count: 0
})
);
expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});
});

describe("Batching", () => {
@@ -168,7 +238,7 @@
const newLetters = generateLetters(10, "PRINTED");
const expectedEntries = newLetters.map((letter) =>
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
);

Expand Down Expand Up @@ -197,19 +267,19 @@ describe("letter-updates-transformer Lambda", () => {
newLetters.slice(0, 10).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
newLetters.slice(10, 20).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
newLetters.slice(20).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
];

1 change: 1 addition & 0 deletions lambdas/letter-updates-transformer/src/env.ts
@@ -2,6 +2,7 @@ import { z } from "zod";

const EnvVarsSchema = z.object({
EVENTPUB_SNS_TOPIC_ARN: z.string(),
EVENT_SOURCE: z.string(),
});

export type EnvVars = z.infer<typeof EnvVarsSchema>;
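
Because EVENT_SOURCE now sits in the zod schema, a deployment that forgets to set it fails when the environment is parsed rather than when the first event is published. A minimal sketch of how the schema might be applied at cold start; the loadEnv helper is an assumption for illustration, not code from this PR:

import { z } from "zod";

// Re-declared here so the sketch is self-contained; matches the schema above.
const EnvVarsSchema = z.object({
  EVENTPUB_SNS_TOPIC_ARN: z.string(),
  EVENT_SOURCE: z.string(),
});

type EnvVars = z.infer<typeof EnvVarsSchema>;

// Hypothetical helper: parse() throws a ZodError naming any missing keys,
// so a misconfigured Lambda fails at cold start.
function loadEnv(): EnvVars {
  return EnvVarsSchema.parse(process.env);
}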

@@ -20,17 +20,17 @@ const BATCH_SIZE = 10;
export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
return async (streamEvent: KinesisStreamEvent) => {
deps.logger.info({ description: "Received event", streamEvent });
deps.logger.info({ description: "Number of records", count: streamEvent.Records?.length || 0 });

const cloudEvents: LetterEvent[] = streamEvent.Records.map((record) =>
// Extract all records up front so every record is logged before filtering
const ddbRecords: DynamoDBRecord[] = streamEvent.Records.map((record) =>
extractPayload(record, deps),
)
.filter((record) => record.eventName === "MODIFY")
.filter(
(record) =>
isChanged(record, "status") || isChanged(record, "reasonCode"),
)
);

const cloudEvents: LetterEvent[] = ddbRecords
.filter((record) => filterRecord(record, deps))
.map((element) => extractNewLetter(element))
.map((element) => mapLetterToCloudEvent(element));
.map((element) => mapLetterToCloudEvent(element, deps.env.EVENT_SOURCE));

for (const batch of generateBatches(cloudEvents)) {
deps.logger.info({
Expand All @@ -50,14 +50,47 @@ export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
};
}

function filterRecord(record: DynamoDBRecord, deps: Deps): boolean {
let allowEvent = false;
if (record.eventName === "INSERT") {
allowEvent = true;
}

if (
record.eventName === "MODIFY" &&
(isChanged(record, "status") || isChanged(record, "reasonCode"))
) {
allowEvent = true;
}

deps.logger.info({
description: "Filtering record",
eventName: record.eventName,
eventId: record.eventID,
allowEvent,
});

return allowEvent;
}

function extractPayload(
record: KinesisStreamRecord,
deps: Deps,
): DynamoDBRecord {
// Kinesis data is base64 encoded
const payload = Buffer.from(record.kinesis.data, "base64").toString("utf8");
deps.logger.info({ description: "Extracted dynamoDBRecord", payload });
return JSON.parse(payload);
try {
deps.logger.info({ description: "Processing Kinesis record", recordId: record.kinesis.sequenceNumber });

// Kinesis data is base64 encoded
const payload = Buffer.from(record.kinesis.data, "base64").toString("utf8");
deps.logger.info({ description: "Decoded payload", payload });

const jsonParsed = JSON.parse(payload);
deps.logger.info({ description: "Extracted dynamoDBRecord", jsonParsed });
return jsonParsed;
} catch (error) {
deps.logger.error({ description: "Error extracting payload", error, record });
throw error;
}
}

function isChanged(record: DynamoDBRecord, property: string): boolean {
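
generateBatches is called by the handler but defined outside this diff. A plausible sketch, assuming it simply chunks the CloudEvents into groups of BATCH_SIZE so that each SNS PublishBatch call stays within its 10-entry limit, which is the behaviour the Batching tests above exercise:

// Sketch only: the real generateBatches is not shown in this PR.
function* generateBatches<T>(items: T[], batchSize = 10): Generator<T[]> {
  for (let i = 0; i < items.length; i += batchSize) {
    yield items.slice(i, i + batchSize);
  }
}

// 25 events would yield batches of 10, 10 and 5, i.e. three PublishBatchCommand calls.
const sizes = [...generateBatches(Array.from({ length: 25 }, (_, i) => i))].map(
  (batch) => batch.length,
); // [10, 10, 5]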

@@ -14,15 +14,16 @@ describe("letter-mapper", () => {
reasonText: "Reason text",
updatedAt: "2025-11-24T15:55:18.000Z",
} as Letter;
const event = mapLetterToCloudEvent(letter);
const source = "/data-plane/supplier-api/nhs-supplier-api-dev/main/letters";
const event = mapLetterToCloudEvent(letter, source);

// Check it conforms to the letter event schema - parse will throw an error if not
$LetterEvent.parse(event);
expect(event.type).toBe("uk.nhs.notify.supplier-api.letter.PRINTED.v1");
expect(event.dataschema).toBe(
`https://notify.nhs.uk/cloudevents/schemas/supplier-api/letter.PRINTED.${event.dataschemaversion}.schema.json`,
);
expect(event.dataschemaversion).toBe("1.0.5");
expect(event.dataschemaversion).toBe("1.0.6");
expect(event.subject).toBe("letter-origin/supplier-api/letter/id1");
expect(event.time).toBe("2025-11-24T15:55:18.000Z");
expect(event.recordedtime).toBe("2025-11-24T15:55:18.000Z");
Expand All @@ -41,5 +42,6 @@ describe("letter-mapper", () => {
event: event.id,
},
});
expect(event.source).toBe(source);
});
});

@@ -5,6 +5,7 @@ import { LetterForEventPub } from "../types";

export default function mapLetterToCloudEvent(
letter: LetterForEventPub,
source: string,
): LetterEvent {
const eventId = randomUUID();
const dataschemaversion = eventSchemaPackage.version;
@@ -15,7 +16,7 @@
plane: "data",
dataschema: `https://notify.nhs.uk/cloudevents/schemas/supplier-api/letter.${letter.status}.${dataschemaversion}.schema.json`,
dataschemaversion,
source: "/data-plane/supplier-api/letters",
source,
subject: `letter-origin/supplier-api/letter/${letter.id}`,

data: {