diff --git a/terraform/account-wide-infrastructure/modules/athena/athena.tf b/terraform/account-wide-infrastructure/modules/athena/athena.tf
index b5765e113..d111611e5 100644
--- a/terraform/account-wide-infrastructure/modules/athena/athena.tf
+++ b/terraform/account-wide-infrastructure/modules/athena/athena.tf
@@ -1,16 +1,3 @@
-resource "aws_athena_database" "reporting-db" {
-  name = var.database
-
-  bucket = var.target_bucket_name
-
-  encryption_configuration {
-    encryption_option = "SSE_KMS"
-    kms_key           = aws_kms_key.athena.arn
-  }
-
-  force_destroy = true
-}
-
 resource "aws_athena_workgroup" "athena" {
   name = "${var.name_prefix}-athena-wg"
 
@@ -19,7 +6,7 @@ resource "aws_athena_workgroup" "athena" {
     publish_cloudwatch_metrics_enabled = true
 
     result_configuration {
-      output_location = "s3://{aws_s3_bucket.athena.bucket}/output/"
+      output_location = "s3://${aws_s3_bucket.athena.id}/output/"
 
       encryption_configuration {
         encryption_option = "SSE_KMS"
diff --git a/terraform/account-wide-infrastructure/modules/athena/outputs.tf b/terraform/account-wide-infrastructure/modules/athena/outputs.tf
index 574aeb3f8..40a8c7961 100644
--- a/terraform/account-wide-infrastructure/modules/athena/outputs.tf
+++ b/terraform/account-wide-infrastructure/modules/athena/outputs.tf
@@ -5,7 +5,3 @@ output "workgroup" {
 output "bucket" {
   value = aws_s3_bucket.athena
 }
-
-output "database" {
-  value = aws_athena_database.reporting-db
-}
diff --git a/terraform/account-wide-infrastructure/modules/glue/glue.tf b/terraform/account-wide-infrastructure/modules/glue/glue.tf
index 64cca24f6..b2c4e262e 100644
--- a/terraform/account-wide-infrastructure/modules/glue/glue.tf
+++ b/terraform/account-wide-infrastructure/modules/glue/glue.tf
@@ -1,16 +1,16 @@
 # Create Glue Data Catalog Database
-resource "aws_glue_catalog_database" "raw_log_database" {
-  name         = "${var.name_prefix}-raw_log"
-  location_uri = "${aws_s3_bucket.source-data-bucket.id}/"
+resource "aws_glue_catalog_database" "log_database" {
+  name         = "${var.name_prefix}-reporting"
+  location_uri = "s3://${aws_s3_bucket.target-data-bucket.id}/logs/"
 }
 
 # Create Glue Crawler
-resource "aws_glue_crawler" "raw_log_crawler" {
-  name          = "${var.name_prefix}-raw-log-crawler"
-  database_name = aws_glue_catalog_database.raw_log_database.name
+resource "aws_glue_crawler" "log_crawler" {
+  name          = "${var.name_prefix}-log-crawler"
+  database_name = aws_glue_catalog_database.log_database.name
   role          = aws_iam_role.glue_service_role.name
   s3_target {
-    path = "${aws_s3_bucket.source-data-bucket.id}/"
+    path = "s3://${aws_s3_bucket.target-data-bucket.id}/logs/"
   }
   schema_change_policy {
     delete_behavior = "LOG"
@@ -22,11 +22,11 @@ resource "aws_glue_crawler" "raw_log_crawler" {
     }
   })
 }
-resource "aws_glue_trigger" "raw_log_trigger" {
+resource "aws_glue_trigger" "log_trigger" {
   name = "${var.name_prefix}-org-report-trigger"
   type = "ON_DEMAND"
   actions {
-    crawler_name = aws_glue_crawler.raw_log_crawler.name
+    crawler_name = aws_glue_crawler.log_crawler.name
   }
 }
 
@@ -49,9 +49,9 @@ resource "aws_glue_job" "glue_job" {
     "--enable-auto-scaling"             = "true"
     "--enable-continous-cloudwatch-log" = "true"
     "--datalake-formats"                = "delta"
-    "--source-path"                     = "s3://${aws_s3_bucket.source-data-bucket.id}/" # Specify the source S3 path
-    "--destination-path"                = "s3://${aws_s3_bucket.target-data-bucket.id}/" # Specify the destination S3 path
-    "--job-name"                        = "poc-glue-job"
+    "--source_path"                     = "s3://${aws_s3_bucket.source-data-bucket.id}/" # Specify the source S3 path
+    "--target_path"                     = "s3://${aws_s3_bucket.target-data-bucket.id}/logs" # Specify the destination S3 path
+    "--job_name"                        = "poc-glue-job"
     "--enable-continuous-log-filter"    = "true"
     "--enable-metrics"                  = "true"
     "--extra-py-files"                  = "s3://${aws_s3_bucket.code-bucket.id}/src.zip"
diff --git a/terraform/account-wide-infrastructure/modules/glue/iam.tf b/terraform/account-wide-infrastructure/modules/glue/iam.tf
index 890b47593..267506ea7 100644
--- a/terraform/account-wide-infrastructure/modules/glue/iam.tf
+++ b/terraform/account-wide-infrastructure/modules/glue/iam.tf
@@ -15,7 +15,79 @@
   })
 }
 
+data "aws_iam_policy_document" "glue_service" {
+  statement {
+    actions = [
+      "s3:AbortMultipartUpload",
+      "s3:GetBucketLocation",
+      "s3:GetObject",
+      "s3:ListBucket",
+      "s3:ListBucketMultipartUploads",
+      "s3:PutObject",
+      "s3:DeleteObject",
+    ]
+
+    resources = compact([
+      aws_s3_bucket.source-data-bucket.arn,
+      "${aws_s3_bucket.source-data-bucket.arn}/*",
+      aws_s3_bucket.target-data-bucket.arn,
+      "${aws_s3_bucket.target-data-bucket.arn}/*",
+      aws_s3_bucket.code-bucket.arn,
+      "${aws_s3_bucket.code-bucket.arn}/*",
+    ])
+    effect = "Allow"
+  }
+
+  statement {
+    actions = [
+      "kms:DescribeKey",
+      "kms:GenerateDataKey*",
+      "kms:Encrypt",
+      "kms:ReEncrypt*",
+      "kms:Decrypt",
+    ]
+
+    resources = [
+      aws_kms_key.glue.arn,
+    ]
+
+    effect = "Allow"
+  }
+
+  statement {
+    actions = [
+      "logs:CreateLogGroup",
+      "logs:CreateLogStream",
+      "logs:PutLogEvents"
+    ]
+
+    resources = [
+      "arn:aws:logs:*:*:*:/aws-glue/*",
+      # "arn:aws:logs:*:*:*:/customlogs/*"
+    ]
+
+    effect = "Allow"
+  }
+
+  statement {
+    actions = [
+      "glue:*",
+    ]
+
+    resources = [
+      "*"
+    ]
+
+    effect = "Allow"
+  }
+}
+
+resource "aws_iam_policy" "glue_service" {
+  name   = "${var.name_prefix}-glue"
+  policy = data.aws_iam_policy_document.glue_service.json
+}
+
 resource "aws_iam_role_policy_attachment" "glue_service" {
-  role       = aws_iam_role.glue_service_role.id
-  policy_arn = "arn:aws:iam::aws:policy/service-role/AWSGlueServiceRole"
+  role       = aws_iam_role.glue_service_role.name
+  policy_arn = aws_iam_policy.glue_service.arn
 }
diff --git a/terraform/account-wide-infrastructure/modules/glue/s3.tf b/terraform/account-wide-infrastructure/modules/glue/s3.tf
index 4695f2b5b..56ed72010 100644
--- a/terraform/account-wide-infrastructure/modules/glue/s3.tf
+++ b/terraform/account-wide-infrastructure/modules/glue/s3.tf
@@ -174,6 +174,7 @@ data "archive_file" "python" {
 
 resource "aws_s3_object" "zip" {
   bucket = aws_s3_bucket.code-bucket.bucket
-  key    = "main.py"
-  source = "${path.module}/files/src.zip"
+  key    = "src.zip"
+  source = data.archive_file.python.output_path
+  etag   = filemd5(data.archive_file.python.output_path)
 }
diff --git a/terraform/account-wide-infrastructure/modules/glue/src/main.py b/terraform/account-wide-infrastructure/modules/glue/src/main.py
index a29ef78d8..416cef5ef 100644
--- a/terraform/account-wide-infrastructure/modules/glue/src/main.py
+++ b/terraform/account-wide-infrastructure/modules/glue/src/main.py
@@ -1,27 +1,26 @@
 import sys
 
 from awsglue.utils import getResolvedOptions
+from pipeline import LogPipeline
 from pyspark.context import SparkContext
-from src.pipeline import LogPipeline
-from src.transformations import placeholder
+from transformations import dtype_conversion, flatten_df, logSchema
 
 # Get arguments from AWS Glue job
-args = getResolvedOptions(
-    sys.argv, ["JOB_NAME", "SOURCE_PATH", "TARGET_PATH", "PARTITION_COLS"]
-)
+args = getResolvedOptions(sys.argv, ["job_name", "source_path", "target_path"])
 
 # Start Glue context
 sc = SparkContext()
 
-partition_cols = args["PARTITION_COLS"].split(",") if "PARTITION_COLS" in args else []
+partition_cols = args["partition_cols"].split(",") if "partition_cols" in args else []
 
 # Initialize ETL process
 etl_job = LogPipeline(
     spark_context=sc,
-    source_path=args["SOURCE_PATH"],
-    target_path=args["TARGET_PATH"],
+    source_path=args["source_path"],
+    target_path=args["target_path"],
+    schema=logSchema,
     partition_cols=partition_cols,
-    transformations=[placeholder],
+    transformations=[flatten_df, dtype_conversion],
 )
 
 # Run the job
diff --git a/terraform/account-wide-infrastructure/modules/glue/src/pipeline.py b/terraform/account-wide-infrastructure/modules/glue/src/pipeline.py
index 50c34af23..e1ba22215 100644
--- a/terraform/account-wide-infrastructure/modules/glue/src/pipeline.py
+++ b/terraform/account-wide-infrastructure/modules/glue/src/pipeline.py
@@ -1,4 +1,4 @@
-from src.instances import GlueContextSingleton, LoggerSingleton
+from instances import GlueContextSingleton, LoggerSingleton
 
 
 class LogPipeline:
@@ -7,7 +7,8 @@ def __init__(
         spark_context,
         source_path,
         target_path,
-        partition_cols=None,
+        schema,
+        partition_cols=[],
         transformations=[],
     ):
         """Initialize Glue context, Spark session, logger, and paths"""
@@ -16,6 +17,7 @@ def __init__(
         self.logger = LoggerSingleton().logger
         self.source_path = source_path
         self.target_path = target_path
+        self.schema = schema
         self.partition_cols = partition_cols
         self.transformations = transformations
 
@@ -36,7 +38,11 @@ def run(self):
     def extract(self):
         """Extract JSON data from S3"""
         self.logger.info(f"Extracting data from {self.source_path} as JSON")
-        return self.spark.read.json(self.source_path)
+        return (
+            self.spark.read.option("recursiveFileLookup", "true")
+            .schema(self.schema)
+            .json(self.source_path)
+        )
 
     def transform(self, dataframe):
         """Apply a list of transformations on the dataframe"""
@@ -48,6 +54,6 @@ def transform(self, dataframe):
     def load(self, dataframe):
         """Load transformed data into Parquet format"""
         self.logger.info(f"Loading data into {self.target_path} as Parquet")
-        dataframe.write.mode("overwrite").partitionBy(*self.partition_cols).parquet(
+        dataframe.write.mode("append").partitionBy(*self.partition_cols).parquet(
             self.target_path
         )
diff --git a/terraform/account-wide-infrastructure/modules/glue/src/transformations.py b/terraform/account-wide-infrastructure/modules/glue/src/transformations.py
index 1d59d52bc..64bb4abe6 100644
--- a/terraform/account-wide-infrastructure/modules/glue/src/transformations.py
+++ b/terraform/account-wide-infrastructure/modules/glue/src/transformations.py
@@ -1 +1,76 @@
-def placeholder(): ...
+from pyspark.sql.functions import to_timestamp
+from pyspark.sql.types import (
+    BooleanType,
+    StringType,
+    StructField,
+    StructType,
+    TimestampType,
+)
+
+logSchema = StructType(
+    [
+        StructField("time", TimestampType(), True),
+        StructField("index", StringType(), True),
+        StructField("host", StringType(), True),
+        StructField("source", StringType(), True),
+        StructField(
+            "event",
+            StructType(
+                [
+                    StructField("level", StringType(), True),
+                    StructField("location", StringType(), True),
+                    StructField("message", StringType(), True),
+                    StructField("timestamp", StringType(), True),
+                    StructField("service", StringType(), True),
+                    StructField("cold_start", BooleanType(), True),
+                    StructField("function_name", StringType(), True),
+                    StructField("function_memory_size", StringType(), True),
+                    StructField("function_arn", StringType(), True),
+                    StructField("function_request_id", StringType(), True),
+                    StructField("correlation_id", StringType(), True),
+                    StructField("method", StringType(), True),
+                    StructField("path", StringType(), True),
+                    StructField(
+                        "headers",
+                        StructType(
+                            [
+                                StructField("accept", StringType(), True),
+                                StructField("accept-encoding", StringType(), True),
+                                StructField("Authorization", StringType(), True),
+                                StructField("Host", StringType(), True),
+                                StructField(
+                                    "NHSD-Connection-Metadata", StringType(), True
+                                ),
+                                StructField("NHSD-Correlation-Id", StringType(), True),
+                                StructField("User-Agent", StringType(), True),
+                                StructField("X-Forwarded-For", StringType(), True),
+                                StructField("X-Request-Id", StringType(), True),
+                            ]
+                        ),
+                        True,
+                    ),
+                    StructField("log_reference", StringType(), True),
+                    StructField("xray_trace_id", StringType(), True),
+                ]
+            ),
+            True,
+        ),
+    ]
+)
+
+
+def flatten_df(df):
+    # Promote the fields of the struct column ("event") to top-level columns
+    cols = [name for name, dtype in df.dtypes if "struct" not in dtype]
+    structs = [name for name, dtype in df.dtypes if "struct" in dtype]
+    if not structs:
+        return df
+    # logSchema has exactly one struct column; select its fields alongside the rest
+    return df.select(*cols, f"{structs[-1]}.*")
+
+
+def dtype_conversion(df):
+    # Parse the string timestamp, e.g. "2024-01-01 12:00:00,123+00:00", into TimestampType
+    return df.withColumn(
+        "timestamp", to_timestamp(df["timestamp"], "yyyy-MM-dd HH:mm:ss,SSSXXX")
+    )
diff --git a/terraform/infrastructure/data.tf b/terraform/infrastructure/data.tf
index 2c3512fa3..e2d2d23d0 100644
--- a/terraform/infrastructure/data.tf
+++ b/terraform/infrastructure/data.tf
@@ -43,6 +43,11 @@ data "external" "current-info" {
 }
 
 data "aws_s3_bucket" "source-data-bucket" {
-  count  = local.is_dev_env ? 1 : 0
+  count  = local.is_dev_env && !local.is_sandbox_env ? 1 : 0
   bucket = "${local.shared_prefix}-source-data-bucket"
 }
+
+data "aws_kms_key" "glue" {
+  count  = local.is_dev_env && !local.is_sandbox_env ? 1 : 0
+  key_id = "alias/${local.shared_prefix}-glue"
+}
diff --git a/terraform/infrastructure/firehose.tf b/terraform/infrastructure/firehose.tf
index fea8712ee..db063e6f4 100644
--- a/terraform/infrastructure/firehose.tf
+++ b/terraform/infrastructure/firehose.tf
@@ -9,5 +9,6 @@ module "firehose__processor" {
   splunk_index           = local.splunk_index
   destination            = "splunk"
   reporting_bucket_arn   = local.reporting_bucket_arn
-  reporting_infra_toggle = local.is_dev_env
+  reporting_kms_arn      = local.reporting_kms_arn
+  reporting_infra_toggle = local.is_dev_env && !local.is_sandbox_env
 }
diff --git a/terraform/infrastructure/lambda.tf b/terraform/infrastructure/lambda.tf
index 64b05c6bc..4658b4f78 100644
--- a/terraform/infrastructure/lambda.tf
+++ b/terraform/infrastructure/lambda.tf
@@ -20,11 +20,9 @@ module "consumer__readDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "read_document_reference.handler"
-  retention = var.log_retention_period
+  firehose_subscriptions = local.firehose_lambda_subscriptions
+  handler                = "read_document_reference.handler"
+  retention              = var.log_retention_period
 }
 
 module "consumer__countDocumentReference" {
@@ -49,11 +47,9 @@ module "consumer__countDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "count_document_reference.handler"
-  retention = var.log_retention_period
+  firehose_subscriptions = local.firehose_lambda_subscriptions
+  handler                = "count_document_reference.handler"
+  retention              = var.log_retention_period
 }
 
 module "consumer__searchDocumentReference" {
@@ -78,11 +74,9 @@ module "consumer__searchDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "search_document_reference.handler"
-  retention = var.log_retention_period
+  firehose_subscriptions = local.firehose_lambda_subscriptions
+  handler                = "search_document_reference.handler"
+  retention              = var.log_retention_period
 }
 
 module "consumer__searchPostDocumentReference" {
@@ -107,11 +101,9 @@ module "consumer__searchPostDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "search_post_document_reference.handler"
-  retention = var.log_retention_period
+  firehose_subscriptions = local.firehose_lambda_subscriptions
+  handler                = "search_post_document_reference.handler"
+  retention              = var.log_retention_period
 }
 
 module "producer__createDocumentReference" {
@@ -137,11 +129,9 @@ module "producer__createDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "create_document_reference.handler"
-  retention = var.log_retention_period
+  firehose_subscriptions = local.firehose_lambda_subscriptions
+  handler                = "create_document_reference.handler"
+  retention              = var.log_retention_period
 }
 
 module "producer__deleteDocumentReference" {
@@ -167,11 +157,9 @@ module "producer__deleteDocumentReference" {
     local.pointers_kms_read_write_arn,
     local.auth_store_read_policy_arn
   ]
-  firehose_subscriptions = [
-    module.firehose__processor.firehose_subscription
-  ]
-  handler   = "delete_document_reference.handler"
"delete_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "delete_document_reference.handler" + retention = var.log_retention_period } module "producer__readDocumentReference" { @@ -196,11 +184,9 @@ module "producer__readDocumentReference" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "read_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "read_document_reference.handler" + retention = var.log_retention_period } module "producer__searchDocumentReference" { @@ -225,11 +211,9 @@ module "producer__searchDocumentReference" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "search_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "search_document_reference.handler" + retention = var.log_retention_period } module "producer__searchPostDocumentReference" { @@ -254,11 +238,9 @@ module "producer__searchPostDocumentReference" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "search_post_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "search_post_document_reference.handler" + retention = var.log_retention_period } module "producer__updateDocumentReference" { @@ -284,11 +266,9 @@ module "producer__updateDocumentReference" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "update_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "update_document_reference.handler" + retention = var.log_retention_period } module "producer__upsertDocumentReference" { @@ -314,11 +294,9 @@ module "producer__upsertDocumentReference" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "upsert_document_reference.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "upsert_document_reference.handler" + retention = var.log_retention_period } module "consumer__status" { @@ -344,11 +322,9 @@ module "consumer__status" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "status.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "status.handler" + retention = var.log_retention_period } @@ -375,9 +351,7 @@ module "producer__status" { local.pointers_kms_read_write_arn, local.auth_store_read_policy_arn ] - firehose_subscriptions = [ - module.firehose__processor.firehose_subscription - ] - handler = "status.handler" - retention = var.log_retention_period + firehose_subscriptions = local.firehose_lambda_subscriptions + handler = "status.handler" + 
retention = var.log_retention_period } diff --git a/terraform/infrastructure/locals.tf b/terraform/infrastructure/locals.tf index dd1cd0f06..13594ef0a 100644 --- a/terraform/infrastructure/locals.tf +++ b/terraform/infrastructure/locals.tf @@ -30,6 +30,13 @@ locals { # Logic / vars for reporting reporting_bucket_arn = local.is_dev_env ? data.aws_s3_bucket.source-data-bucket[0].arn : null + reporting_kms_arn = local.is_dev_env ? data.aws_kms_key.glue[0].arn : null + firehose_lambda_subscriptions = local.is_dev_env ? [ + module.firehose__processor.firehose_subscription, + module.firehose__processor.firehose_reporting_subscription + ] : [ + module.firehose__processor.firehose_subscription + ] # Logic / vars for splunk environment splunk_environment = local.is_sandbox_env ? "${var.account_name}sandbox" : var.account_name diff --git a/terraform/infrastructure/modules/firehose/iam_firehose.tf b/terraform/infrastructure/modules/firehose/iam_firehose.tf index 89e72587d..340b12fb6 100644 --- a/terraform/infrastructure/modules/firehose/iam_firehose.tf +++ b/terraform/infrastructure/modules/firehose/iam_firehose.tf @@ -31,6 +31,7 @@ data "aws_iam_policy_document" "firehose" { aws_s3_bucket.firehose.arn, "${aws_s3_bucket.firehose.arn}/*", var.reporting_bucket_arn, + local.iam_firehose.reporting_s3_arn, ]) effect = "Allow" } @@ -44,9 +45,7 @@ data "aws_iam_policy_document" "firehose" { "kms:Decrypt", ] - resources = [ - aws_kms_key.firehose.arn, - ] + resources = local.iam_kms_resources } statement { actions = [ diff --git a/terraform/infrastructure/modules/firehose/kinesis.tf b/terraform/infrastructure/modules/firehose/kinesis.tf index 7c0c4a288..b44735357 100644 --- a/terraform/infrastructure/modules/firehose/kinesis.tf +++ b/terraform/infrastructure/modules/firehose/kinesis.tf @@ -67,7 +67,25 @@ resource "aws_kinesis_firehose_delivery_stream" "reporting_stream" { bucket_arn = var.reporting_bucket_arn processing_configuration { - enabled = "false" + enabled = "true" + + processors { + type = "Decompression" + parameters { + parameter_name = "CompressionFormat" + parameter_value = "GZIP" + } + } + + processors { + type = "CloudWatchLogProcessing" + + parameters { + parameter_name = "DataMessageExtraction" + parameter_value = "true" + } + } + } cloudwatch_logging_options { diff --git a/terraform/infrastructure/modules/firehose/locals.tf b/terraform/infrastructure/modules/firehose/locals.tf index 4658e993a..80a0f3367 100644 --- a/terraform/infrastructure/modules/firehose/locals.tf +++ b/terraform/infrastructure/modules/firehose/locals.tf @@ -34,10 +34,16 @@ locals { iam_firehose = { cloudwatch_reporting_log_group_arn = var.reporting_infra_toggle ? aws_cloudwatch_log_group.firehose_reporting[0].arn : null cloudwatch_reporting_log_stream_arn = var.reporting_infra_toggle ? aws_cloudwatch_log_stream.firehose_reporting[0].arn : null + reporting_s3_arn = var.reporting_infra_toggle ? "${var.reporting_bucket_arn}/*" : null } iam_subscriptions = { firehose_reporting_stream_arn = var.reporting_infra_toggle ? 
aws_kinesis_firehose_delivery_stream.reporting_stream[0].arn : null } + iam_kms_resources = compact([ + aws_kms_key.firehose.arn, + var.reporting_kms_arn + ]) + } diff --git a/terraform/infrastructure/modules/firehose/output.tf b/terraform/infrastructure/modules/firehose/output.tf index 9e83e76a3..a0b594642 100644 --- a/terraform/infrastructure/modules/firehose/output.tf +++ b/terraform/infrastructure/modules/firehose/output.tf @@ -31,3 +31,18 @@ output "firehose_subscription" { } } } + +output "firehose_reporting_subscription" { + value = var.reporting_infra_toggle ? { + destination = { + arn = local.iam_subscriptions.firehose_reporting_stream_arn + } + role = { + arn = aws_iam_role.firehose_subscription.arn + } + filter = { + # At least two items, and the first not any of INIT_START, START, END, REPORT + pattern = "[first_item_on_this_log_line != \"INIT_START\" && first_item_on_this_log_line != \"START\" && first_item_on_this_log_line != \"END\" && first_item_on_this_log_line != \"REPORT\", everything_else_on_this_log_line]" + } + } : null +} diff --git a/terraform/infrastructure/modules/firehose/vars.tf b/terraform/infrastructure/modules/firehose/vars.tf index dec876c12..e98affd1d 100644 --- a/terraform/infrastructure/modules/firehose/vars.tf +++ b/terraform/infrastructure/modules/firehose/vars.tf @@ -40,6 +40,11 @@ variable "reporting_bucket_arn" { default = null } +variable "reporting_kms_arn" { + type = string + default = null +} + variable "reporting_infra_toggle" { type = bool }
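Reviewer note: a minimal local smoke test for the new transformations, assuming pyspark is installed and the script is run from terraform/account-wide-infrastructure/modules/glue/src so that transformations.py is importable. The file name, sample record, and SparkSession settings below are illustrative only and are not part of this change:

# smoke_test.py -- hypothetical helper, not included in this patch
from pyspark.sql import SparkSession

from transformations import dtype_conversion, flatten_df, logSchema

spark = SparkSession.builder.master("local[1]").appName("log-etl-smoke").getOrCreate()

# One fabricated record shaped like the logs described by logSchema;
# fields omitted here (cold_start, headers, ...) simply come out as null
sample = [
    {
        "index": "logs_main",
        "host": "lambda",
        "source": "cloudwatch",
        "event": {
            "level": "INFO",
            "message": "hello",
            "timestamp": "2024-01-01 12:00:00,123+00:00",
        },
    }
]

df = spark.createDataFrame(sample, schema=logSchema)

# Mirrors the transformations list wired into LogPipeline in main.py:
# flatten_df promotes event.* to top-level columns, then dtype_conversion
# parses the string timestamp into a TimestampType column
flat = dtype_conversion(flatten_df(df))
flat.select("index", "level", "message", "timestamp").show(truncate=False)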