diff --git a/Makefile b/Makefile index 6c754d888..351dbc6a9 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,3 @@ - .EXPORT_ALL_VARIABLES: .NOTPARALLEL: .PHONY: * @@ -16,6 +15,8 @@ ACCOUNT ?= dev APP_ALIAS ?= default HOST ?= $(TF_WORKSPACE_NAME).api.record-locator.$(ENV).national.nhs.uk ENV_TYPE ?= $(ENV) +PERFTEST_TABLE_NAME ?= perftest +PERFTEST_HOST ?= perftest-1.perftest.record-locator.national.nhs.uk export PATH := $(PATH):$(PWD)/.venv/bin export USE_SHARED_RESOURCES := $(shell poetry run python scripts/are_resources_shared_for_stack.py $(TF_WORKSPACE_NAME)) @@ -246,3 +247,33 @@ generate-models: check-warn ## Generate Pydantic Models --output ./layer/nrlf/consumer/fhir/r4/model.py \ --base-class nrlf.core.parent_model.Parent \ --output-model-type "pydantic_v2.BaseModel" + + +generate-perftest-permissions: ## Generate perftest permissions and add to nrlf_permissions + poetry run python tests/performance/producer/generate_permissions.py --output_dir="$(DIST_PATH)/nrlf_permissions/K6PerformanceTest" + +perftest-producer: + @echo "Running producer performance tests with HOST=$(PERFTEST_HOST) and ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)" + k6 run tests/performance/producer/perftest.js -e HOST=$(PERFTEST_HOST) -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH) + +perftest-consumer: + @echo "Running consumer performance tests with HOST=$(PERFTEST_HOST) and ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)" + k6 run tests/performance/consumer/perftest.js -e HOST=$(PERFTEST_HOST) -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH) + +perftest-prep-generate-producer-data: + @echo "Generating producer reference with PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and DIST_PATH=$(DIST_PATH)" + mkdir -p $(DIST_PATH) + PYTHONPATH=. poetry run python tests/performance/perftest_environment.py generate_producer_data --output_dir="$(DIST_PATH)" + +perftest-prep-extract-consumer-data: + @echo "Generating consumer reference with PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and DIST_PATH=$(DIST_PATH)" + mkdir -p $(DIST_PATH) + PYTHONPATH=. poetry run python tests/performance/perftest_environment.py extract_consumer_data --output_dir="$(DIST_PATH)" + +perftest-prep-generate-pointer-table-extract: + @echo "Generating pointer table extract with PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and DIST_PATH=$(DIST_PATH)" + mkdir -p $(DIST_PATH) + PYTHONPATH=. poetry run python tests/performance/perftest_environment.py generate_pointer_table_extract --output_dir="$(DIST_PATH)" + +perftest-prepare: perftest-prep-generate-producer-data perftest-prep-extract-consumer-data perftest-prep-generate-pointer-table-extract + @echo "Prepared performance tests with PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and DIST_PATH=$(DIST_PATH)" diff --git a/tests/performance/README.md b/tests/performance/README.md new file mode 100644 index 000000000..6c01cc1a5 --- /dev/null +++ b/tests/performance/README.md @@ -0,0 +1,51 @@ +# Performance Testing + +This folder contains the k6 performance test suites for the NRLF producer and consumer APIs, together with the Make targets and Python scripts used to prepare the permissions and reference data they depend on. + +## Run perf tests + +### Prep the environment + +Perf tests are generally conducted in the perftest env. There's a selection of tables in the perftest env representing different pointer volume scenarios, e.g. perftest-baseline vs perftest-1million (todo: update with real names!). + +To reset a table to the expected state for perf tests, restore it from a backup. + +In the steps below, make sure the table name matches the table your environment is pointing at. You might need to redeploy NRLF lambdas to point at the desired table.
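+
+As a rough sketch (not a committed script), the restore can be driven with boto3. The table name below follows the `nhsd-nrlf--<PERFTEST_TABLE_NAME>-pointers-table` convention used by `perftest_environment.py`; the backup selection is illustrative and assumes the target table has already been deleted, since DynamoDB will not restore onto an existing table:
+
+```python
+# Illustrative only: restore the perftest pointers table from its most recent on-demand backup.
+import boto3
+
+client = boto3.client("dynamodb", region_name="eu-west-2")
+table_name = "nhsd-nrlf--perftest-baseline-pointers-table"  # adjust to the table you are resetting
+
+# Pick the newest backup taken for this table
+backups = client.list_backups(TableName=table_name)["BackupSummaries"]
+latest = max(backups, key=lambda backup: backup["BackupCreationDateTime"])
+
+# Restores into a table with this name; the old table must have been deleted first
+client.restore_table_from_backup(
+    TargetTableName=table_name,
+    BackupArn=latest["BackupArn"],
+)
+```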
+ +### Prepare to run tests + +#### Pull certs for env + +```sh +assume management +make truststore-pull-all ENV=perftest +``` + +#### Generate permissions + +You will need to generate pointer permissions the first time performance tests are run in an environment, e.g. if the perftest environment is destroyed and recreated. + +```sh +make generate-perftest-permissions # generates the JSON permission files for the K6PerformanceTest app +make build # bundles all permissions into the nrlf_permissions.zip file + +# apply this new permissions zip file to your environment +cd ./terraform/infrastructure +assume test # needed? +make init TF_WORKSPACE_NAME=perftest-1 ENV=perftest +tf apply +``` + +#### Generate input files + +```sh +# creates two JSON reference files and one CSV pointer-table extract +make perftest-prepare PERFTEST_TABLE_NAME=perftest-baseline +``` + +### Run tests + +```sh +make perftest-consumer ENV_TYPE=perftest PERFTEST_HOST=perftest-1.perftest.record-locator.national.nhs.uk +make perftest-producer ENV_TYPE=perftest PERFTEST_HOST=perftest-1.perftest.record-locator.national.nhs.uk +``` diff --git a/tests/performance/constants.js b/tests/performance/constants.js index 668cc9fb3..8883876ad 100644 --- a/tests/performance/constants.js +++ b/tests/performance/constants.js @@ -1,3 +1,5 @@ +import { CATEGORY_TYPE_GROUPS } from "./type-category-mappings.js"; + export const DEFAULT_TEST_RECORD = open( "../data/DocumentReference/Y05868-736253002-Valid.json" ); @@ -12,23 +14,34 @@ export const ALL_POINTER_IDS = export const POINTERS_TO_DELETE = ALL_POINTER_IDS.slice(0, 3500); export const POINTER_IDS = ALL_POINTER_IDS.slice(3500); export const NHS_NUMBERS = REFERENCE_DATA["nhs_numbers"]; -export const POINTER_TYPES = [ + +// filter only 736253001, 736253002, 1363501000000100, 861421000000109, 749001000000101 for now +export const FILTERED_POINTER_TYPES = [ + "736253001", "736253002", "1363501000000100", - "1382601000000107", - "325691000000100", - "736373009", "861421000000109", - "887701000000100", - "736366004", - "735324008", - "824321000000109", - "2181441000000107", -]; -export const CATEGORIES = [ - "734163000", - "1102421000000108", - "823651000000106", - "721981007", - "103693007", + "749001000000101", ]; + +export const POINTER_TYPES = FILTERED_POINTER_TYPES; + +export const CATEGORIES = CATEGORY_TYPE_GROUPS.map( + (group) => group.category.code +); +export const POINTER_TYPE_DISPLAY = Object.fromEntries( + CATEGORY_TYPE_GROUPS.flatMap((group) => + group.types.map((t) => [t.code, t.display]) + ) +); +export const TYPE_CATEGORY_MAP = Object.fromEntries( + CATEGORY_TYPE_GROUPS.flatMap((group) => + group.types.map((t) => [t.code, group.category.code]) + ) +); +export const CATEGORY_DISPLAY = Object.fromEntries( + CATEGORY_TYPE_GROUPS.map((group) => [ + group.category.code, + group.category.display, + ]) +); diff --git a/tests/performance/consumer/client_perftest.js b/tests/performance/consumer/client_perftest.js new file mode 100644 index 000000000..25d40b756 --- /dev/null +++ b/tests/performance/consumer/client_perftest.js @@ -0,0 +1,228 @@ +import http from "k6/http"; +import { check } from "k6"; +import exec from "k6/execution"; +import { CATEGORY_TYPE_GROUPS } from "../type-category-mappings.js"; + +const csvPath = __ENV.DIST_PATH + ?
`../../../${__ENV.DIST_PATH}/producer_reference_data.csv` + : "../producer_reference_data.csv"; +const csv = open(csvPath); +const lines = csv.trim().split("\n"); +// Skip header +const dataLines = lines.slice(1); + +function getNextPointer() { + // pick the next line according to iteration in scenario + const iter = exec.vu.iterationInScenario; + const index = iter % dataLines.length; + const line = dataLines[index]; + const [count, pointer_id, pointer_type, custodian, nhs_number] = line + .split(",") + .map((field) => field.trim()); + return { pointer_id, pointer_type, nhs_number }; +} + +function getHeaders(odsCode) { + return { + "Content-Type": "application/fhir+json", + "X-Request-Id": `K6perftest-consumer-${exec.scenario.name}-${exec.vu.idInTest}-${exec.vu.iterationInScenario}`, + "NHSD-Correlation-Id": `K6perftest-consumer-${exec.scenario.name}-${exec.vu.idInTest}-${exec.vu.iterationInScenario}`, + "NHSD-Connection-Metadata": JSON.stringify({ + "nrl.ods-code": odsCode, + "nrl.app-id": "K6PerformanceTest", + }), + "NHSD-Client-RP-Details": JSON.stringify({ + "developer.app.name": "K6PerformanceTest", + "developer.app.id": "K6PerformanceTest", + }), + }; +} + +function getCustodianFromPointerId(pointer_id) { + // pointer_id format is "CUSTODIAN-XXXX" + return pointer_id.split("-")[0]; +} + +function checkResponse(res) { + const is_success = check(res, { "status is 200": (r) => r.status === 200 }); + if (!is_success) { + console.warn(res.json()); + } +} + +const pointerTypeToCategoryMap = new Map(); +for (const group of CATEGORY_TYPE_GROUPS) { + for (const type of group.types) { + pointerTypeToCategoryMap.set(type.code, group.category.code); + } +} + +export function countDocumentReference() { + const { pointer_id, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + const identifier = encodeURIComponent( + `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}` + ); + + const res = http.get( + `https://${__ENV.HOST}/consumer/DocumentReference?_summary=count&subject:identifier=${identifier}`, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function readDocumentReference() { + const { pointer_id } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + + const res = http.get( + `https://${__ENV.HOST}/consumer/DocumentReference/${pointer_id}`, + { + headers: getHeaders(custodian), + } + ); + + checkResponse(res); +} + +export function searchDocumentReference() { + const { pointer_id, pointer_type, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + + const identifier = encodeURIComponent( + `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}` + ); + const type = encodeURIComponent(`http://snomed.info/sct|${pointer_type}`); + + const res = http.get( + `https://${__ENV.HOST}/consumer/DocumentReference?subject:identifier=${identifier}&type=${type}`, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function searchDocumentReferenceByCategory() { + const { pointer_id, pointer_type, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + const category_code = pointerTypeToCategoryMap.get(pointer_type); + + const identifier = encodeURIComponent( + `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}` + ); + const category = encodeURIComponent( + `http://snomed.info/sct|${category_code}` + ); + + const res = http.get( + 
`https://${__ENV.HOST}/consumer/DocumentReference?subject:identifier=${identifier}&category=${category}`, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function searchPostDocumentReference() { + const { pointer_id, pointer_type, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + + const body = JSON.stringify({ + "subject:identifier": `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}`, + type: `http://snomed.info/sct|${pointer_type}`, + }); + + const res = http.post( + `https://${__ENV.HOST}/consumer/DocumentReference/_search`, + body, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function searchPostDocumentReferenceByCategory() { + const { pointer_id, pointer_type, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + const category_code = pointerTypeToCategoryMap.get(pointer_type); + + const body = JSON.stringify({ + "subject:identifier": `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}`, + category: `http://snomed.info/sct|${category_code}`, + }); + + const res = http.post( + `https://${__ENV.HOST}/consumer/DocumentReference/_search`, + body, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function countPostDocumentReference() { + const { pointer_id, nhs_number } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + + const body = JSON.stringify({ + "subject:identifier": `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}`, + }); + + const res = http.post( + `https://${__ENV.HOST}/consumer/DocumentReference/_search?_summary=count`, + body, + { + headers: getHeaders(custodian), + } + ); + checkResponse(res); +} + +export function searchPostDocumentReferenceAccessDenied() { + const { nhs_number, pointer_type } = getNextPointer(); + + const body = JSON.stringify({ + "subject:identifier": `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}`, + type: `http://snomed.info/sct|${pointer_type}`, + }); + + // Use a custodian that should not have access (simulate denied) + const deniedCustodian = "DENIED_ODS_CODE"; + let headers = getHeaders(deniedCustodian); + headers["NHSD-Connection-Metadata"] = JSON.stringify({ + "nrl.ods-code": deniedCustodian, + "nrl.app-id": "K6PerformanceTest", + }); + + const res = http.post( + `https://${__ENV.HOST}/consumer/DocumentReference/_search`, + body, + { + headers: headers, + } + ); + + const is_denied = check(res, { "status is 403": (r) => r.status === 403 }); + if (!is_denied) { + console.warn(`Expected access denied but got: ${res.status}`); + } +} + +export function readDocumentReferenceNotFound() { + const { pointer_id } = getNextPointer(); + const custodian = getCustodianFromPointerId(pointer_id); + + const res = http.get( + `https://${__ENV.HOST}/consumer/DocumentReference/NonExistentID`, + { + headers: getHeaders(custodian), + } + ); + + // we expect a 404 here + check(res, { "status is 404": (r) => r.status === 404 }); +} diff --git a/tests/performance/consumer/consumer_reference_data.json b/tests/performance/consumer/consumer_reference_data.json new file mode 100644 index 000000000..c38c42ca0 --- /dev/null +++ b/tests/performance/consumer/consumer_reference_data.json @@ -0,0 +1,9 @@ +{ + "nhs_numbers": ["9694202043"], + "pointer_ids": [ + "RQI-9347490b-6087-4be6-8c95-82ad9fb0c83f", + "RQI-123", + "RQI-7fba4cfb-acfe-4b62-ac85-916197a24868" + ], + "custodians": ["RQI"] +} diff --git a/tests/performance/consumer/perftest.config.json b/tests/performance/consumer/perftest.config.json new file mode 100644 index
000000000..4bbe6b4d9 --- /dev/null +++ b/tests/performance/consumer/perftest.config.json @@ -0,0 +1,51 @@ +{ + "profile": "SOAK", + "tlsAuth": { + "cert": "../../../truststore/client/${ENV_TYPE}.crt", + "key": "../../../truststore/client/${ENV_TYPE}.key" + }, + "scenarios": { + "countDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "countPostDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "readDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchDocumentReferenceByCategory": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchPostDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchPostDocumentReferenceByCategory": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + } + } +} diff --git a/tests/performance/consumer/perftest.js b/tests/performance/consumer/perftest.js new file mode 100644 index 000000000..f70ee9fa8 --- /dev/null +++ b/tests/performance/consumer/perftest.js @@ -0,0 +1,53 @@ +export * from "./client_perftest.js"; + +const config = JSON.parse(open("./perftest.config.json")); +const PROFILE = config.profile.toUpperCase(); +const tlsAuth = config.tlsAuth; +const scenarioConfigs = config.scenarios; + +function makeSoakScenario(execName, conf) { + return { + exec: execName, + executor: "ramping-arrival-rate", + startRate: 0, + timeUnit: "1s", + preAllocatedVUs: 5, + stages: [ + { target: conf.tps, duration: conf.duration }, + { target: conf.tps, duration: conf.hold }, + { target: 0, duration: conf.rampDown }, + ], + }; +} + +function makeStressScenario(execName, conf) { + return { + exec: execName, + executor: "ramping-arrival-rate", + startRate: conf.startTps || 1, + timeUnit: "1s", + preAllocatedVUs: 5, + stages: [ + { target: conf.tps, duration: conf.duration }, + { target: conf.tps, duration: conf.hold }, + ], + }; +} + +const scenarios = {}; +for (const [name, conf] of Object.entries(scenarioConfigs)) { + scenarios[name] = + PROFILE === "SOAK" + ? 
makeSoakScenario(name, conf) + : makeStressScenario(name, conf); +} + +export const options = { + tlsAuth: [ + { + cert: open(tlsAuth.cert.replace("${ENV_TYPE}", __ENV.ENV_TYPE)), + key: open(tlsAuth.key.replace("${ENV_TYPE}", __ENV.ENV_TYPE)), + }, + ], + scenarios, +}; diff --git a/tests/performance/generate_type_categories_constants.py b/tests/performance/generate_type_categories_constants.py new file mode 100644 index 000000000..fb157df01 --- /dev/null +++ b/tests/performance/generate_type_categories_constants.py @@ -0,0 +1,43 @@ +import json +import re + +from nrlf.core.constants import CATEGORY_ATTRIBUTES, TYPE_ATTRIBUTES, TYPE_CATEGORIES + + +def extract_code(enum_val): + return enum_val.split("|")[1] + + +def extract_category_code(enum_val): + return enum_val.split("|")[1] + + +# Build mapping from category code to pointer types +category_type_groups = {} +for pt, cat in TYPE_CATEGORIES.items(): + category_code = extract_category_code(cat) + pointer_code = extract_code(pt) + pointer_display = TYPE_ATTRIBUTES[pt]["display"] + category_display = CATEGORY_ATTRIBUTES[cat]["display"] + + if category_code not in category_type_groups: + category_type_groups[category_code] = { + "category": {"code": category_code, "display": category_display}, + "types": [], + } + category_type_groups[category_code]["types"].append( + {"code": pointer_code, "display": pointer_display} + ) + +# Convert to list for JS export +category_type_groups_list = list(category_type_groups.values()) + +js_content = f"export const CATEGORY_TYPE_GROUPS = {json.dumps(category_type_groups_list, indent=2)};\n" + +# Remove quotes from keys +js_content = re.sub(r'"(\w+)":', r"\1:", js_content) + +with open("./tests/performance/type-category-mappings.js", "w") as f: + # Please note, that the linter/formatter likes to add commas at the end of the last items in objects/arrays. + # I'm not adding the code to add those here as I don't think we need the extra complexity. 
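+    # The generated file is imported as CATEGORY_TYPE_GROUPS by tests/performance/constants.js and the consumer k6 client.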
+ f.write(js_content) diff --git a/tests/performance/perftest_environment.py b/tests/performance/perftest_environment.py new file mode 100644 index 000000000..f96a60371 --- /dev/null +++ b/tests/performance/perftest_environment.py @@ -0,0 +1,246 @@ +import csv +import json +import os +import pathlib +import re + +import boto3 + +DYNAMODB = boto3.resource("dynamodb", region_name="eu-west-2") + +default_table_name = "default-table-name" + + +def _get_pointers_table_name(): + perftest_table_name = os.environ.get("PERFTEST_TABLE_NAME", default_table_name) + + if re.search("^nhsd-nrlf--.+-pointers-table$", perftest_table_name): + return perftest_table_name + + return f"nhsd-nrlf--{perftest_table_name}-pointers-table" + + +def extract_consumer_data(output_dir="."): + out = output_dir + "/consumer_reference_data.json" + table_name = _get_pointers_table_name() + table = DYNAMODB.Table(table_name) + scan_kwargs = {} + done = False + start_key = None + nhs_numbers = set() + pointer_ids = [] + custodians = set() + while not done: + if start_key: + scan_kwargs["ExclusiveStartKey"] = start_key + response = table.scan(**scan_kwargs) + for item in response.get("Items", []): + nhs_number = item.get("nhs_number") + pointer_id = item.get("id") + custodian = item.get("custodian") + if nhs_number: + nhs_numbers.add(nhs_number) + if pointer_id: + pointer_ids.append(pointer_id) + if custodian: + custodians.add(custodian) + start_key = response.get("LastEvaluatedKey", None) + done = start_key is None + data = { + "nhs_numbers": list(nhs_numbers), + "pointer_ids": pointer_ids, + "custodians": list(custodians), + } + pathlib.Path(out).write_text(json.dumps(data)) + print(f"Consumer data written to {out}") # noqa: T201 + + +# Semi-deterministic NHS number generator (duplicated from seed_nft_tables.py) +CHECKSUM_WEIGHTS = [i for i in range(10, 1, -1)] + + +class TestNhsNumbersIterator: + def __iter__(self): + self.first9 = 900000000 + return self + + def __next__(self): + if self.first9 > 999999999: + raise StopIteration + checksum = 10 + while checksum == 10: + self.first9 += 1 + nhs_no_digits = list(map(int, str(self.first9))) + checksum = ( + sum( + weight * digit + for weight, digit in zip(CHECKSUM_WEIGHTS, nhs_no_digits) + ) + * -1 + % 11 + ) + nhs_no = str(self.first9) + str(checksum) + return nhs_no + + +def generate_producer_data( + output_dir=".", + proportion_existing=0.8, # Proportion of output that should be existing NHS numbers + total_count=1000, # Total number of NHS numbers to output + last_existing_nhs_number=None, # Optionally specify the last NHS number in the table +): + """ + Generate a reference dataset for producer tests, containing a mix of existing and new NHS numbers. + - proportion_existing: fraction of output that should be existing NHS numbers (0.0-1.0) + - total_count: total number of NHS numbers in output + NHS numbers are generated in a semi-deterministic way, similar to the NFT seeding script. 
+ """ + out = output_dir + "/producer_reference_data.json" + table_name = _get_pointers_table_name() + table = DYNAMODB.Table(table_name) + scan_kwargs = {} + done = False + start_key = None + existing_nhs_numbers = set() + + # Scan DynamoDB table for all existing NHS numbers + while not done: + if start_key: + scan_kwargs["ExclusiveStartKey"] = start_key + response = table.scan(**scan_kwargs) + for item in response.get("Items", []): + nhs_number = item.get("nhs_number") + if nhs_number: + existing_nhs_numbers.add(nhs_number) + start_key = response.get("LastEvaluatedKey", None) + done = start_key is None + + # Calculate how many existing and new NHS numbers to use + num_existing = min( + int(total_count * proportion_existing), len(existing_nhs_numbers) + ) + num_new = total_count - num_existing + + # Select existing NHS numbers + selected_existing_nhs_numbers = list(existing_nhs_numbers)[:num_existing] + + # Generate new NHS numbers that do not overlap with existing + new_nhs_numbers = set() + # If last_existing_nhs_number is provided, start from the next value + if last_existing_nhs_number is not None: + try: + # Use only the first 9 digits for incrementing + start_first9 = int(str(last_existing_nhs_number)[:9]) + except Exception: + start_first9 = 900000000 + + class CustomTestNhsNumbersIterator: + def __iter__(self): + self.first9 = start_first9 + return self + + def __next__(self): + if self.first9 > 999999999: + raise StopIteration + checksum = 10 + while checksum == 10: + self.first9 += 1 + nhs_no_digits = list(map(int, str(self.first9))) + checksum = ( + sum( + weight * digit + for weight, digit in zip(CHECKSUM_WEIGHTS, nhs_no_digits) + ) + * -1 + % 11 + ) + nhs_no = str(self.first9) + str(checksum) + return nhs_no + + nhs_iter = iter(CustomTestNhsNumbersIterator()) + else: + nhs_iter = iter(TestNhsNumbersIterator()) + while len(new_nhs_numbers) < num_new: + nhs = next(nhs_iter) + if nhs not in existing_nhs_numbers and nhs not in new_nhs_numbers: + new_nhs_numbers.add(nhs) + + # Prepare output data + data = { + "new_nhs_numbers": list(new_nhs_numbers), + "existing_nhs_numbers": selected_existing_nhs_numbers, + "proportion_existing": proportion_existing, + "total_count": total_count, + } + pathlib.Path(out).write_text(json.dumps(data)) + print(f"Producer data written to {out}") # noqa: T201 + + +def generate_pointer_table_extract( + output_dir=".", +): + """ + Generate a CSV file containing all pointer IDs, pointer type, custodian, and nhs_number (patient). 
+ """ + out = output_dir + "/producer_reference_data.csv" + table_name = _get_pointers_table_name() + table = DYNAMODB.Table(table_name) + scan_kwargs = {} + done = False + start_key = None + buffer = [] + buffer_size = 1_000_000 # 10k rows needs ~3MB of RAM, so 1M rows needs ~300MB + count = 1 + + with open(out, "w", newline="") as csv_file: + writer = csv.writer(csv_file) + writer.writerow( + ["count", "pointer_id", "pointer_type", "custodian", "nhs_number"] + ) + while not done: + + if start_key: + scan_kwargs["ExclusiveStartKey"] = start_key + response = table.scan(**scan_kwargs) + for item in response.get("Items", []): + pointer_id = item.get("id", "") + pointer_type = item.get("type", "").split("|", 1)[ + 1 + ] # only keep code part + custodian = item.get("custodian", "") + nhs_number = item.get("nhs_number", "") + buffer.append( + [ + str(field).strip() + for field in [ + count, + pointer_id, + pointer_type, + custodian, + nhs_number, + ] + ] + ) + count += 1 + if len(buffer) >= buffer_size: + print("Writing buffer to CSV...") # noqa: T201 + writer.writerows(buffer) + buffer.clear() + start_key = response.get("LastEvaluatedKey", None) + done = start_key is None + # Write any remaining rows in buffer + if buffer: + writer.writerows(buffer) + print(f"Producer CSV data written to {out}") # noqa: T201 + + +if __name__ == "__main__": + import fire + + fire.Fire( + { + "extract_consumer_data": extract_consumer_data, + "generate_producer_data": generate_producer_data, + "generate_pointer_table_extract": generate_pointer_table_extract, + } + ) diff --git a/tests/performance/producer/client_perftest.js b/tests/performance/producer/client_perftest.js new file mode 100644 index 000000000..5d1cf08c9 --- /dev/null +++ b/tests/performance/producer/client_perftest.js @@ -0,0 +1,398 @@ +import http from "k6/http"; +import { ODS_CODE } from "../constants.js"; +import { check } from "k6"; +import { randomItem } from "https://jslib.k6.io/k6-utils/1.2.0/index.js"; +import { crypto } from "k6/experimental/webcrypto"; +import { createRecord } from "../setup.js"; +import exec from "k6/execution"; + +const csvPath = __ENV.DIST_PATH + ? `../../../${__ENV.DIST_PATH}/producer_reference_data.csv` + : "../producer_reference_data.csv"; +const csv = open(csvPath); +const lines = csv.trim().split("\n"); +// Skip header +const dataLines = lines.slice(1); + +let pointerDist; +let pickPointerType, pickCustodian; + +const referenceData = JSON.parse(open("./producer_reference_data.json")); +const NHSNumberStart = referenceData.NHSNumberStart; +const NHSNumberEnd = referenceData.NHSNumberEnd; +const ReuseNHSNumbersRatio = referenceData.ReuseNHSNumbersRatio || 0.8; +const MinimumFreeNHSNumbers = referenceData.MinimumFreeNHSNumbers || 100000; + +const NHS_NUMBER_MIN = 0; +const NHS_NUMBER_MAX = 9999999999; + +// Check if NHSNumberEnd is too close to max +if (NHS_NUMBER_MAX - NHSNumberEnd < MinimumFreeNHSNumbers) { + throw new Error( + `NHSNumberEnd (${NHSNumberEnd}) is too close to the maximum NHS number (${NHS_NUMBER_MAX}). 
Minimum free NHS numbers required: ${MinimumFreeNHSNumbers}` + ); +} + +// Get next pointer data from CSV for current iteration +function getNextPointer() { + // Use K6's iteration in scenario for deterministic selection + const iter = exec.vu.iterationInScenario; + const index = iter % dataLines.length; + const line = dataLines[index]; + // Adjust field names as per CSV columns: count,pointer_id,pointer_type,custodian,nhs_number + const [count, pointer_id, pointer_type, custodian, nhs_number] = line + .split(",") + .map((field) => field.trim()); + return { pointer_id, pointer_type, custodian, nhs_number }; +} + +function randomNHSNumberInRange(start, end) { + // Inclusive range + const range = end - start + 1; + const rand = Math.floor(Math.random() * range); + return start + rand; +} + +function generateValidNHSNumber(start, end) { + let nhsNumber = undefined; + + while (!nhsNumber) { + const seedNumber = randomNHSNumberInRange(start, end); + const first9Str = String(seedNumber).padStart(9, "0").substring(0, 9); + + if (!first9Str.match(/^\d{9}$/)) { + throw new Error( + `bad NHS number generated - expected 9 digits: ${first9Str}, ${seedNumber}` + ); + } + + const parts = []; + const digits = first9Str.split(""); + for (let i = 0; i < digits.length; i++) { + parts.push(parseInt(digits[i], 10) * (10 - i)); + } + const list_sum = parts.reduce((a, b) => a + b, 0); + + let checksum = 11 - (list_sum % 11); + + if (checksum === 10) { + // Checksum of 10 means NHS number is invalid + continue; + } + + if (checksum === 11) { + checksum = 0; + } + + nhsNumber = `${first9Str}${checksum}`; + } + + return nhsNumber; +} + +function pickNHSNumber() { + if (Math.random() < ReuseNHSNumbersRatio) { + // Reuse: pick within [NHSNumberStart, NHSNumberEnd] + return generateValidNHSNumber(NHSNumberStart, NHSNumberEnd); + } else { + // New: always pick from [NHSNumberEnd + 1, NHS_NUMBER_MAX] + return generateValidNHSNumber(NHSNumberEnd + 1, NHS_NUMBER_MAX); + } +} + +// Load expanded pointer/custodian distributions +pointerDist = JSON.parse(open("./expanded_pointer_distributions.json")); +// Filter types to only those with a custodian array +pointerDist.types = pointerDist.types.filter( + (type) => pointerDist.custodians[type] +); + +pickPointerType = function () { + return randomItem(pointerDist.types); +}; +pickCustodian = function (typeCode) { + const arr = pointerDist.custodians[typeCode]; + if (!arr) throw new Error(`No custodian array for type ${typeCode}`); + return randomItem(arr); +}; + +function getBaseURL() { + return `https://${__ENV.HOST}/producer/DocumentReference`; +} + +function getHeaders(odsCode = ODS_CODE) { + return { + "Content-Type": "application/fhir+json", + "X-Request-Id": `K6perftest-producer-${exec.scenario.name}-${exec.vu.idInTest}-${exec.vu.iterationInScenario}`, + "NHSD-Correlation-Id": `K6perftest-producer-${exec.scenario.name}-${exec.vu.idInTest}-${exec.vu.iterationInScenario}`, + "NHSD-Connection-Metadata": JSON.stringify({ + "nrl.ods-code": odsCode, + "nrl.app-id": "K6PerformanceTest", + }), + "NHSD-Client-RP-Details": JSON.stringify({ + "developer.app.name": "K6PerformanceTest", + "developer.app.id": "K6PerformanceTest", + }), + }; +} +function checkResponse(res) { + const is_success = check(res, { "status is 200": (r) => r.status === 200 }); + if (!is_success) { + console.warn(res.json()); + } +} + +export function createDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const 
record = createRecord(nhsNumber, pointerType, custodian); + const res = http.post(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(res, { "create status is 201": (r) => r.status === 201 }); + if (res.status !== 201) { + console.warn( + `Failed to create record: ${res.status}: ${ + JSON.parse(res.body).issue[0].diagnostics + }` + ); + } +} + +export function readDocumentReference() { + const { pointer_id, custodian } = getNextPointer(); + const res = http.get(`${getBaseURL()}/${pointer_id}`, { + headers: getHeaders(custodian), + }); + checkResponse(res); +} + +export function createThenReadDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const record = createRecord(nhsNumber, pointerType, custodian); + const createRes = http.post(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(createRes, { "create status is 201": (r) => r.status === 201 }); + if (createRes.status !== 201) { + console.warn( + `Failed to create record: ${createRes.status}: ${ + JSON.parse(createRes.body).issue[0].diagnostics + }` + ); + return; + } + + // Get pointer ID from the location header + const locationHeader = createRes.headers["Location"]; + const createdId = locationHeader + ? locationHeader.split("/").pop() + : record.id; + + const readRes = http.get(`${getBaseURL()}/${createdId}`, { + headers: getHeaders(custodian), + }); + + check(readRes, { "create and read status is 200": (r) => r.status === 200 }); + if (readRes.status !== 200) { + console.warn( + `Failed to read record: ${readRes.status}: ${ + JSON.parse(readRes.body).issue[0].diagnostics + }` + ); + } +} + +export function upsertThenReadDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const record = createRecord(nhsNumber, pointerType, custodian); + record.id = `${custodian}-${crypto.randomUUID()}`; + const upsertRes = http.put(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(upsertRes, { "upsert status is 201": (r) => r.status === 201 }); + if (upsertRes.status !== 201) { + console.warn( + `Failed to upsert record: ${upsertRes.status}: ${ + JSON.parse(upsertRes.body).issue[0].diagnostics + }` + ); + return; + } + + const upsertedId = record.id; + + const readRes = http.get(`${getBaseURL()}/${upsertedId}`, { + headers: getHeaders(custodian), + }); + + check(readRes, { "upsert and read status is 200": (r) => r.status === 200 }); + if (readRes.status !== 200) { + console.warn( + `Failed to read record: ${upsertedId}, ${readRes.status}: ${ + JSON.parse(readRes.body).issue[0].diagnostics + }` + ); + } +} + +export function createThenUpdateDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const record = createRecord(nhsNumber, pointerType, custodian); + const createRes = http.post(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(createRes, { + "createThenUpdateDocumentReference: create status is 201": (r) => + r.status === 201, + }); + if (createRes.status !== 201) { + console.warn( + `Failed to create record: ${createRes.status}: ${ + JSON.parse(createRes.body).issue[0].diagnostics + }` + ); + return; + } + + // Get pointer ID from the location header + const locationHeader = createRes.headers["Location"]; + 
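+  // Fall back to the id from the request body if no Location header is returned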
const createdId = locationHeader + ? locationHeader.split("/").pop() + : record.id; + + record.id = createdId; + + // Now update the record + record.content[0].attachment.url = "https://example.com/k6-updated-url.pdf"; + + const updateRes = http.put( + `${getBaseURL()}/${createdId}`, + JSON.stringify(record), + { + headers: getHeaders(custodian), + } + ); + + check(updateRes, { + "createThenUpdateDocumentReference: update status is 200": (r) => + r.status === 200, + }); + if (updateRes.status !== 200) { + console.warn( + `Failed to update record (createThenUpdateDocumentReference): ${ + updateRes.status + }: ${JSON.parse(updateRes.body).issue[0].diagnostics}` + ); + } +} + +export function upsertThenUpdateDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const record = createRecord(nhsNumber, pointerType, custodian); + record.id = `${custodian}-${crypto.randomUUID()}`; + const upsertRes = http.put(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(upsertRes, { "upsert status is 201": (r) => r.status === 201 }); + if (upsertRes.status !== 201) { + console.warn( + `Failed to upsert record: ${upsertRes.status}: ${ + JSON.parse(upsertRes.body).issue[0].diagnostics + }` + ); + return; + } + + const upsertedId = record.id; + + // Now update the record + record.content[0].attachment.url = "https://example.com/k6-updated-url.pdf"; + + const updateRes = http.put( + `${getBaseURL()}/${upsertedId}`, + JSON.stringify(record), + { + headers: getHeaders(custodian), + } + ); + + check(updateRes, { + "upsertThenUpdateDocumentReference: update status is 200": (r) => + r.status === 200, + }); + if (updateRes.status !== 200) { + console.warn( + `Failed to update record (upsertThenUpdateDocumentReference): ${upsertedId}, ${ + updateRes.status + }: ${JSON.parse(updateRes.body).issue[0].diagnostics}` + ); + } +} + +export function upsertDocumentReference() { + const nhsNumber = pickNHSNumber(); + const pointerType = pickPointerType(); + const custodian = pickCustodian(pointerType); + const record = createRecord(nhsNumber, pointerType, custodian); + record.id = `${custodian}-k6perf-${crypto.randomUUID()}`; + const res = http.put(getBaseURL(), JSON.stringify(record), { + headers: getHeaders(custodian), + }); + check(res, { "create status is 201": (r) => r.status === 201 }); + if (res.status !== 201) { + console.warn( + `Failed to create record: ${res.status}: ${ + JSON.parse(res.body).issue[0].diagnostics + }` + ); + } +} + +export function searchDocumentReference() { + const { pointer_type, nhs_number, custodian } = getNextPointer(); + const identifier = encodeURIComponent( + `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}` + ); + const type = encodeURIComponent(`http://snomed.info/sct|${pointer_type}`); + const url = `${getBaseURL()}?subject:identifier=${identifier}&type=${type}`; + const res = http.get(url, { + headers: getHeaders(custodian), + }); + check(res, { + "searchDocumentReference status is 200": (r) => r.status === 200, + }); + if (res.status !== 200) { + console.log( + `Search failed with ${res.status}: ${JSON.stringify(res.body)}` + ); + } +} + +export function searchPostDocumentReference() { + const { pointer_type, nhs_number, custodian } = getNextPointer(); + const body = JSON.stringify({ + "subject:identifier": `https://fhir.nhs.uk/Id/nhs-number|${nhs_number}`, + type: `http://snomed.info/sct|${pointer_type}`, + }); + const res = 
http.post(`${getBaseURL()}/_search`, body, { + headers: getHeaders(custodian), + }); + check(res, { + "searchPostDocumentReference status is 200": (r) => r.status === 200, + }); + if (res.status !== 200) { + console.log( + `Search failed with ${res.status}: ${JSON.stringify(res.body)}` + ); + } +} diff --git a/tests/performance/producer/delete_permissions.py b/tests/performance/producer/delete_permissions.py new file mode 100644 index 000000000..b9d0d7d60 --- /dev/null +++ b/tests/performance/producer/delete_permissions.py @@ -0,0 +1,23 @@ +from pathlib import Path + +import fire +from seed_nft_tables import DEFAULT_CUSTODIAN_DISTRIBUTIONS + + +def main(permissions_dir="../../dist/nrlf_permissions/K6PerformanceTest"): + permissions_dir = Path(permissions_dir) + # Collect all custodian codes from DEFAULT_CUSTODIAN_DISTRIBUTIONS + custodian_codes = set() + for custodians in DEFAULT_CUSTODIAN_DISTRIBUTIONS.values(): + custodian_codes.update(custodians.keys()) + + # Delete only the files for these custodians + for custodian in custodian_codes: + file = permissions_dir / f"{custodian}.json" + if file.exists(): + print(f"Deleting {file}") # noqa: T201 + file.unlink() + + +if __name__ == "__main__": + fire.Fire(main) diff --git a/tests/performance/producer/expanded_pointer_distributions.json b/tests/performance/producer/expanded_pointer_distributions.json new file mode 100644 index 000000000..f98c89146 --- /dev/null +++ b/tests/performance/producer/expanded_pointer_distributions.json @@ -0,0 +1,229 @@ +{ + "types": [ + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "736253002", + "1382601000000107", + "1382601000000107", + "1382601000000107", + "1382601000000107", + "1382601000000107", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "887701000000100", + "861421000000109", + "861421000000109", + "861421000000109", + "861421000000109", + "861421000000109", + "735324008", + "735324008", + "735324008", + "735324008", + "735324008", + "824321000000109", + "824321000000109", + "824321000000109", + "824321000000109", + "824321000000109" + ], + "custodians": { + "736253002": [ + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRPG", + "TRHA", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + "TRRE", + 
"TRRE", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TRAT", + "TWR4", + "TWR4", + "TWR4", + "TWR4", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRKL", + "TRW1", + "TRW1", + "TRW1", + "TRW1", + "TRW1", + "TRH5", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRP7", + "TRWK", + "TRWK", + "TRWK", + "TRWK", + "TRWK", + "TRWK", + "TRWK", + "TRWK", + "TRQY", + "TRQY", + "TRQY", + "TRV5", + "TRV5", + "TRV5", + "TRJ8", + "TRJ8", + "TRXA", + "TRXA", + "TRXA", + "TRXA", + "T11X", + "TG6V", + "TG6V" + ], + "1382601000000107": ["T8GX8", "T8GX8", "T8GX8", "TQUY", "TQUY"], + "887701000000100": [ + "TV1", + "TV2", + "TV2", + "TV3", + "TV4", + "TV5", + "TV5", + "TV5", + "TV6" + ], + "861421000000109": [ + "TV1", + "TV1", + "TV2", + "TV2", + "TV3", + "TV4", + "TV5", + "TV5", + "TV5", + "TV6" + ], + "735324008": ["TV1", "TV2", "TV3", "TV4", "TV4", "TV5", "TV5", "TV6"], + "824321000000109": ["TRXT"] + } +} diff --git a/tests/performance/producer/generate_distributions.py b/tests/performance/producer/generate_distributions.py new file mode 100644 index 000000000..1ea674dce --- /dev/null +++ b/tests/performance/producer/generate_distributions.py @@ -0,0 +1,30 @@ +import json +from pathlib import Path + +# Import the constants from your seed_nft_tables.py +from seed_nft_tables import DEFAULT_CUSTODIAN_DISTRIBUTIONS, DEFAULT_TYPE_DISTRIBUTIONS + + +def expand_distribution(dist): + arr = [] + for key, count in dist.items(): + arr.extend([key] * count) + return arr + + +# Expand type distribution +expanded_types = expand_distribution(DEFAULT_TYPE_DISTRIBUTIONS) + +# Expand custodian distributions for each type +expanded_custodians = {} +for type_code, custodian_dist in DEFAULT_CUSTODIAN_DISTRIBUTIONS.items(): + expanded_custodians[type_code] = expand_distribution(custodian_dist) + +output = {"types": expanded_types, "custodians": expanded_custodians} + +out_path = Path("./tests/performance/expanded_pointer_distributions.json") +out_path.parent.mkdir(parents=True, exist_ok=True) +with out_path.open("w") as f: + json.dump(output, f, indent=2) + +print(f"Expanded pointer distributions written to {out_path}") # noqa: T201 diff --git a/tests/performance/producer/generate_permissions.py b/tests/performance/producer/generate_permissions.py new file mode 100644 index 000000000..29b289e6c --- /dev/null +++ b/tests/performance/producer/generate_permissions.py @@ -0,0 +1,27 @@ +import json +from pathlib import Path + +import fire +from seed_nft_tables import DEFAULT_CUSTODIAN_DISTRIBUTIONS + + +def main(output_dir="../../dist/nrlf_permissions/K6PerformanceTest"): + output_dir = Path(output_dir) + output_dir.mkdir(parents=True, exist_ok=True) + + # Invert the mapping: custodian -> list of pointer types + custodian_permissions = {} + for pointer_type, custodians in DEFAULT_CUSTODIAN_DISTRIBUTIONS.items(): + for custodian, _ in custodians.items(): + custodian_permissions.setdefault(custodian, []).append(pointer_type) + + for custodian, pointer_types in custodian_permissions.items(): + permissions = [f"http://snomed.info/sct|{pt}" for pt in pointer_types] + out_path = output_dir / f"{custodian}.json" + with out_path.open("w") as f: + json.dump(permissions, f, indent=2) + print(f"Wrote permissions for {custodian} to {out_path}") # noqa: T201 + + +if __name__ == "__main__": + fire.Fire(main) diff --git a/tests/performance/producer/perftest.config.json 
b/tests/performance/producer/perftest.config.json new file mode 100644 index 000000000..20726b3ae --- /dev/null +++ b/tests/performance/producer/perftest.config.json @@ -0,0 +1,39 @@ +{ + "profile": "SOAK", + "tlsAuth": { + "cert": "../../../truststore/client/${ENV_TYPE}.crt", + "key": "../../../truststore/client/${ENV_TYPE}.key" + }, + "scenarios": { + "createDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "readDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "upsertDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + }, + "searchPostDocumentReference": { + "tps": 5, + "duration": "5m", + "hold": "30m", + "rampDown": "1m" + } + } +} diff --git a/tests/performance/producer/perftest.js b/tests/performance/producer/perftest.js new file mode 100644 index 000000000..f70ee9fa8 --- /dev/null +++ b/tests/performance/producer/perftest.js @@ -0,0 +1,53 @@ +export * from "./client_perftest.js"; + +const config = JSON.parse(open("./perftest.config.json")); +const PROFILE = config.profile.toUpperCase(); +const tlsAuth = config.tlsAuth; +const scenarioConfigs = config.scenarios; + +function makeSoakScenario(execName, conf) { + return { + exec: execName, + executor: "ramping-arrival-rate", + startRate: 0, + timeUnit: "1s", + preAllocatedVUs: 5, + stages: [ + { target: conf.tps, duration: conf.duration }, + { target: conf.tps, duration: conf.hold }, + { target: 0, duration: conf.rampDown }, + ], + }; +} + +function makeStressScenario(execName, conf) { + return { + exec: execName, + executor: "ramping-arrival-rate", + startRate: conf.startTps || 1, + timeUnit: "1s", + preAllocatedVUs: 5, + stages: [ + { target: conf.tps, duration: conf.duration }, + { target: conf.tps, duration: conf.hold }, + ], + }; +} + +const scenarios = {}; +for (const [name, conf] of Object.entries(scenarioConfigs)) { + scenarios[name] = + PROFILE === "SOAK" + ? 
makeSoakScenario(name, conf) + : makeStressScenario(name, conf); +} + +export const options = { + tlsAuth: [ + { + cert: open(tlsAuth.cert.replace("${ENV_TYPE}", __ENV.ENV_TYPE)), + key: open(tlsAuth.key.replace("${ENV_TYPE}", __ENV.ENV_TYPE)), + }, + ], + scenarios, +}; diff --git a/tests/performance/producer/producer_reference_data.json b/tests/performance/producer/producer_reference_data.json new file mode 100644 index 000000000..a96ba8698 --- /dev/null +++ b/tests/performance/producer/producer_reference_data.json @@ -0,0 +1,6 @@ +{ + "NHSNumberStart": 9000000000, + "NHSNumberEnd": 9000999999, + "ReuseNHSNumbersRatio": 0.8, + "MinimumFreeNHSNumbers": 100000 +} diff --git a/tests/performance/producer/seed_nft_tables.py b/tests/performance/producer/seed_nft_tables.py new file mode 100644 index 000000000..f59a85af1 --- /dev/null +++ b/tests/performance/producer/seed_nft_tables.py @@ -0,0 +1,68 @@ +import boto3 + +dynamodb = boto3.client("dynamodb") +resource = boto3.resource("dynamodb") + + +# DOC_REF_TEMPLATE = load_document_reference("NFT-template") + +CHECKSUM_WEIGHTS = [i for i in range(10, 1, -1)] + +# These are based on the Nov 7th 2025 pointer stats report +DEFAULT_TYPE_DISTRIBUTIONS = { + "736253002": 65, # mental health crisis plan + "1382601000000107": 5, # respect form + "887701000000100": 15, # emergency healthcare plan + "861421000000109": 5, # eol care coordination summary + "735324008": 5, # treatment escalation plan + "824321000000109": 5, # summary record +} + +DEFAULT_CUSTODIAN_DISTRIBUTIONS = { + "736253002": { + "TRPG": 9, + "TRHA": 1, + "TRRE": 20, + "TRAT": 10, + "TWR4": 4, + "TRKL": 9, + "TRW1": 5, + "TRH5": 1, + "TRP7": 13, + "TRWK": 8, + "TRQY": 3, + "TRV5": 3, + "TRJ8": 2, + "TRXA": 4, + "T11X": 1, + "TG6V": 2, + }, + "1382601000000107": {"T8GX8": 3, "TQUY": 2}, # respect form + "887701000000100": { + "TV1": 1, + "TV2": 2, + "TV3": 1, + "TV4": 1, + "TV5": 3, + "TV6": 1, + }, # emergency healthcare plan + "861421000000109": { + "TV1": 2, + "TV2": 2, + "TV3": 1, + "TV4": 1, + "TV5": 3, + "TV6": 1, + }, # eol care coordination summary + "735324008": { + "TV1": 1, + "TV2": 1, + "TV3": 1, + "TV4": 2, + "TV5": 2, + "TV6": 1, + }, # treatment escalation plan + "824321000000109": { + "TRXT": 1, + }, # summary record currently has only one supplier +} diff --git a/tests/performance/setup.js b/tests/performance/setup.js index ab1542c02..cd273f9e1 100644 --- a/tests/performance/setup.js +++ b/tests/performance/setup.js @@ -1,14 +1,29 @@ -import { POINTER_TYPES, DEFAULT_TEST_RECORD, ODS_CODE } from "./constants.js"; -import { crypto } from "k6/experimental/webcrypto"; +import { + DEFAULT_TEST_RECORD, + ODS_CODE, + POINTER_TYPE_DISPLAY, + TYPE_CATEGORY_MAP, + CATEGORY_DISPLAY, +} from "./constants.js"; -export function createRecord(nhsNumber, pointerType) { +export function createRecord(nhsNumber, pointerType, custodian = ODS_CODE) { const record = JSON.parse(DEFAULT_TEST_RECORD); - record.id = `${ODS_CODE}-${crypto.randomUUID()}`; record.type.coding[0].code = pointerType; - record.type.coding[0].display = POINTER_TYPES[pointerType]; + if (!POINTER_TYPE_DISPLAY[pointerType]) { + throw new Error(`Display not found for pointerType: ${pointerType}`); + } + record.type.coding[0].display = POINTER_TYPE_DISPLAY[pointerType]; + + const categoryCode = TYPE_CATEGORY_MAP[pointerType]; + record.category[0].coding[0].code = categoryCode; + record.category[0].coding[0].display = CATEGORY_DISPLAY[categoryCode]; + record.subject.identifier.value = nhsNumber; 
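+  // Mirror the NHS number into sourcePatientInfo so it matches the subject identifier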
record.context.sourcePatientInfo.identifier.value = nhsNumber; + // Set custodian + record.custodian.identifier.value = custodian; + return record; } diff --git a/tests/performance/type-category-mappings.js b/tests/performance/type-category-mappings.js new file mode 100644 index 000000000..37939bdcf --- /dev/null +++ b/tests/performance/type-category-mappings.js @@ -0,0 +1,124 @@ +export const CATEGORY_TYPE_GROUPS = [ + { + category: { + code: "734163000", + display: "Care plan", + }, + types: [ + { + code: "736253002", + display: "Mental health crisis plan", + }, + { + code: "887701000000100", + display: "Emergency health care plan", + }, + { + code: "861421000000109", + display: "End of life care coordination summary", + }, + { + code: "1382601000000107", + display: + "ReSPECT (Recommended Summary Plan for Emergency Care and Treatment) form", + }, + { + code: "325691000000100", + display: "Contingency plan", + }, + { + code: "736373009", + display: "End of life care plan", + }, + { + code: "16521000000101", + display: "Lloyd George record folder", + }, + { + code: "736366004", + display: "Advance care plan", + }, + { + code: "735324008", + display: "Treatment escalation plan", + }, + { + code: "2181441000000107", + display: "Personalised Care and Support Plan", + }, + ], + }, + { + category: { + code: "1102421000000108", + display: "Observations", + }, + types: [ + { + code: "1363501000000100", + display: + "Royal College of Physicians NEWS2 (National Early Warning Score 2) chart", + }, + ], + }, + { + category: { + code: "823651000000106", + display: "Clinical note", + }, + types: [ + { + code: "824321000000109", + display: "Summary record", + }, + ], + }, + { + category: { + code: "721981007", + display: "Diagnostic studies report", + }, + types: [ + { + code: "MAULR", + display: "MRA Upper Limb Rt", + }, + ], + }, + { + category: { + code: "103693007", + display: "Diagnostic procedure", + }, + types: [ + { + code: "MAXIB", + display: "MRI Axilla Both", + }, + ], + }, + { + category: { + code: "419891008", + display: "Record artifact", + }, + types: [ + { + code: "749001000000101", + display: "Appointment", + }, + ], + }, + { + category: { + code: "716931000000107", + display: "Record headings", + }, + types: [ + { + code: "887181000000106", + display: "Clinical summary", + }, + ], + }, +];