1 change: 1 addition & 0 deletions docs/package.json
@@ -15,6 +15,7 @@
"debug": "JEKYLL_ENV=development BUNDLE_GEMFILE=Gemfile bundle exec jekyll serve --config _config.yml,_config.dev.yml,_config.version.yml --limit_posts 100 --trace",
"generate-includes": "./generate-includes.sh",
"lint": "echo \"Documentation module has no code to lint\"",
"lint:fix": "echo \"Documentation module has no code to lint\"",
"test:unit": "echo \"Documentation module has no unit tests\"",
"typecheck": "echo \"Documentation module has no typescript to typecheck\""
},
@@ -35,7 +35,8 @@ module "letter_updates_transformer" {
log_subscription_role_arn = local.acct.log_subscription_role_arn

lambda_env_vars = merge(local.common_lambda_env_vars, {
EVENTPUB_SNS_TOPIC_ARN = "${module.eventpub.sns_topic.arn}"
EVENTPUB_SNS_TOPIC_ARN = "${module.eventpub.sns_topic.arn}",
EVENT_SOURCE = "/data-plane/supplier-api/${var.group}/${var.environment}/letters"
})
}

2 changes: 1 addition & 1 deletion internal/events/package.json
@@ -50,5 +50,5 @@
"typecheck": "tsc --noEmit"
},
"types": "dist/index.d.ts",
"version": "1.0.8"
"version": "1.0.9"
}
2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.ACCEPTED.json
@@ -18,7 +18,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",
2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.FORWARDED.json
@@ -20,7 +20,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",
2 changes: 1 addition & 1 deletion internal/events/schemas/examples/letter.RETURNED.json
@@ -20,7 +20,7 @@
"recordedtime": "2025-08-28T08:45:00.000Z",
"severitynumber": 2,
"severitytext": "INFO",
"source": "/data-plane/supplier-api/prod/update-status",
"source": "/data-plane/supplier-api/nhs-supplier-api-prod/main/update-status",
"specversion": "1.0",
"subject": "letter-origin/letter-rendering/letter/f47ac10b-58cc-4372-a567-0e02b2c3d479",
"time": "2025-08-28T08:45:00.000Z",
@@ -26,15 +26,18 @@ jest.mock("crypto", () => ({
randomBytes: (size: number) => randomBytes[String(size)],
}));

describe("letter-updates-transformer Lambda", () => {
const mockedDeps: jest.Mocked<Deps> = {
snsClient: { send: jest.fn() } as unknown as SNSClient,
logger: { info: jest.fn(), error: jest.fn() } as unknown as pino.Logger,
env: {
EVENTPUB_SNS_TOPIC_ARN: "arn:aws:sns:region:account:topic",
} as unknown as EnvVars,
} as Deps;
const eventSource =
"/data-plane/supplier-api/nhs-supplier-api-dev/main/letters";
const mockedDeps: jest.Mocked<Deps> = {
snsClient: { send: jest.fn() } as unknown as SNSClient,
logger: { info: jest.fn(), error: jest.fn() } as unknown as pino.Logger,
env: {
EVENTPUB_SNS_TOPIC_ARN: "arn:aws:sns:region:account:topic",
EVENT_SOURCE: eventSource,
} as unknown as EnvVars,
} as Deps;

describe("letter-updates-transformer Lambda", () => {
beforeEach(() => {
jest.useFakeTimers();
});
@@ -50,7 +53,9 @@ describe("letter-updates-transformer Lambda", () => {
const newLetter = generateLetter("PRINTED");
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -76,7 +81,9 @@ describe("letter-updates-transformer Lambda", () => {
newLetter.reasonCode = "R1";
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -103,7 +110,9 @@ describe("letter-updates-transformer Lambda", () => {
newLetter.reasonCode = "R2";
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(newLetter)),
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

@@ -135,14 +144,28 @@ describe("letter-updates-transformer Lambda", () => {
expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});

it("does not publish non-modify events", async () => {
it("publishes INSERT events", async () => {
const handler = createHandler(mockedDeps);
const newLetter = generateLetter("ACCEPTED");
const expectedEntries = [
expect.objectContaining({
Message: JSON.stringify(
mapLetterToCloudEvent(newLetter, eventSource),
),
}),
];

const testData = generateKinesisEvent([generateInsertRecord(newLetter)]);
await handler(testData, mockDeep<Context>(), jest.fn());

expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
expect(mockedDeps.snsClient.send).toHaveBeenCalledWith(
expect.objectContaining({
input: expect.objectContaining({
TopicArn: "arn:aws:sns:region:account:topic",
PublishBatchRequestEntries: expectedEntries,
}),
}),
);
});

it("does not publish invalid letter data", async () => {
@@ -159,6 +182,55 @@

expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});

it("throws error when kinesis data contains malformed JSON", async () => {
const handler = createHandler(mockedDeps);

// Create a Kinesis event with malformed JSON data
const malformedKinesisEvent: KinesisStreamEvent = {
Records: [
{
kinesis: {
data: Buffer.from("invalid-json-data").toString("base64"),
sequenceNumber: "12345",
},
} as any,
],
};

await expect(
handler(malformedKinesisEvent, mockDeep<Context>(), jest.fn()),
).rejects.toThrow();

expect(mockedDeps.logger.error).toHaveBeenCalledWith(
expect.objectContaining({
description: "Error extracting payload",
error: expect.any(Error),
record: expect.objectContaining({
kinesis: expect.objectContaining({
data: Buffer.from("invalid-json-data").toString("base64"),
}),
}),
}),
);
});

it("handles events with no records", async () => {
const handler = createHandler(mockedDeps);

// Create a Kinesis event with empty Records array
const emptyKinesisEvent: KinesisStreamEvent = { Records: [] };

await handler(emptyKinesisEvent, mockDeep<Context>(), jest.fn());

expect(mockedDeps.logger.info).toHaveBeenCalledWith(
expect.objectContaining({
description: "Number of records",
count: 0,
}),
);
expect(mockedDeps.snsClient.send).not.toHaveBeenCalled();
});
});

describe("Batching", () => {
@@ -168,7 +240,7 @@
const newLetters = generateLetters(10, "PRINTED");
const expectedEntries = newLetters.map((letter) =>
expect.objectContaining({
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
);

@@ -197,19 +269,19 @@ describe("letter-updates-transformer Lambda", () => {
newLetters.slice(0, 10).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
newLetters.slice(10, 20).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
newLetters.slice(20).map((letter, index) =>
expect.objectContaining({
Id: expect.stringMatching(new RegExp(`-${index}$`)),
Message: JSON.stringify(mapLetterToCloudEvent(letter)),
Message: JSON.stringify(mapLetterToCloudEvent(letter, eventSource)),
}),
),
];
1 change: 1 addition & 0 deletions lambdas/letter-updates-transformer/src/env.ts
@@ -2,6 +2,7 @@ import { z } from "zod";

const EnvVarsSchema = z.object({
EVENTPUB_SNS_TOPIC_ARN: z.string(),
EVENT_SOURCE: z.string(),
});

export type EnvVars = z.infer<typeof EnvVarsSchema>;
@@ -20,17 +20,20 @@ const BATCH_SIZE = 10;
export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
return async (streamEvent: KinesisStreamEvent) => {
deps.logger.info({ description: "Received event", streamEvent });
deps.logger.info({
description: "Number of records",
count: streamEvent.Records?.length || 0,
});

const cloudEvents: LetterEvent[] = streamEvent.Records.map((record) =>
// Ensure logging by extracting all records first
const ddbRecords: DynamoDBRecord[] = streamEvent.Records.map((record) =>
extractPayload(record, deps),
)
.filter((record) => record.eventName === "MODIFY")
.filter(
(record) =>
isChanged(record, "status") || isChanged(record, "reasonCode"),
)
);

const cloudEvents: LetterEvent[] = ddbRecords
.filter((record) => filterRecord(record, deps))
.map((element) => extractNewLetter(element))
.map((element) => mapLetterToCloudEvent(element));
.map((element) => mapLetterToCloudEvent(element, deps.env.EVENT_SOURCE));

for (const batch of generateBatches(cloudEvents)) {
deps.logger.info({
@@ -50,14 +53,54 @@ export default function createHandler(deps: Deps): Handler<KinesisStreamEvent> {
};
}

function filterRecord(record: DynamoDBRecord, deps: Deps): boolean {
let allowEvent = false;
if (record.eventName === "INSERT") {
allowEvent = true;
}

if (
record.eventName === "MODIFY" &&
(isChanged(record, "status") || isChanged(record, "reasonCode"))
) {
allowEvent = true;
}

deps.logger.info({
description: "Filtering record",
eventName: record.eventName,
eventId: record.eventID,
allowEvent,
});

return allowEvent;
}

function extractPayload(
record: KinesisStreamRecord,
deps: Deps,
): DynamoDBRecord {
// Kinesis data is base64 encoded
const payload = Buffer.from(record.kinesis.data, "base64").toString("utf8");
deps.logger.info({ description: "Extracted dynamoDBRecord", payload });
return JSON.parse(payload);
try {
deps.logger.info({
description: "Processing Kinesis record",
recordId: record.kinesis.sequenceNumber,
});

// Kinesis data is base64 encoded
const payload = Buffer.from(record.kinesis.data, "base64").toString("utf8");
deps.logger.info({ description: "Decoded payload", payload });

const jsonParsed = JSON.parse(payload);
deps.logger.info({ description: "Extracted dynamoDBRecord", jsonParsed });
return jsonParsed;
} catch (error) {
deps.logger.error({
description: "Error extracting payload",
error,
record,
});
throw error;
}
}

function isChanged(record: DynamoDBRecord, property: string): boolean {
@@ -17,15 +17,16 @@ describe("letter-mapper", () => {
source: "letter-rendering/source/test",
subject: "letter-rendering/source/letter/letter-id",
} as Letter;
const event = mapLetterToCloudEvent(letter);
const source = "/data-plane/supplier-api/nhs-supplier-api-dev/main/letters";
const event = mapLetterToCloudEvent(letter, source);

// Check it conforms to the letter event schema - parse will throw an error if not
$LetterEvent.parse(event);
expect(event.type).toBe("uk.nhs.notify.supplier-api.letter.PRINTED.v1");
expect(event.dataschema).toBe(
`https://notify.nhs.uk/cloudevents/schemas/supplier-api/letter.PRINTED.${event.dataschemaversion}.schema.json`,
);
expect(event.dataschemaversion).toBe("1.0.8");
expect(event.dataschemaversion).toMatch(/1\.\d+\.\d+/);
expect(event.subject).toBe("letter-origin/supplier-api/letter/id1");
expect(event.time).toBe("2025-11-24T15:55:18.000Z");
expect(event.recordedtime).toBe("2025-11-24T15:55:18.000Z");
@@ -45,5 +46,6 @@
event: event.id,
},
});
expect(event.source).toBe(source);
});
});
@@ -5,6 +5,7 @@ import { LetterForEventPub } from "../types";

export default function mapLetterToCloudEvent(
letter: LetterForEventPub,
source: string,
): LetterEvent {
const eventId = randomUUID();
const dataschemaversion = eventSchemaPackage.version;
@@ -15,7 +16,7 @@
plane: "data",
dataschema: `https://notify.nhs.uk/cloudevents/schemas/supplier-api/letter.${letter.status}.${dataschemaversion}.schema.json`,
dataschemaversion,
source: "/data-plane/supplier-api/letters",
source,
subject: `letter-origin/supplier-api/letter/${letter.id}`,

data: {
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default.