diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml
index 95f5775c6b9..604a90a3364 100644
--- a/.generator/schemas/v2/openapi.yaml
+++ b/.generator/schemas/v2/openapi.yaml
@@ -6766,6 +6766,8 @@ components:
description: Optional prefix for blobs written to the container.
example: logs/
type: string
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
container_name:
description: The name of the Azure Blob Storage container to store logs
in.
@@ -36391,6 +36393,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
client_id:
description: Azure AD client ID used for authentication.
example: a1b2c3d4-5678-90ab-cdef-1234567890ab
@@ -38337,6 +38341,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth'
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
@@ -38418,6 +38424,8 @@ components:
description: S3 bucket name.
example: error-logs
type: string
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: amazon-s3-destination
@@ -38539,6 +38547,8 @@ components:
description: Name of the Amazon S3 bucket in Security Lake (3-63 characters).
example: security-lake-bucket
type: string
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
custom_source_name:
description: Custom source name for the logs in Security Lake.
example: my-custom-source
@@ -38598,6 +38608,39 @@ components:
role session.
type: string
type: object
+ ObservabilityPipelineBufferOptions:
+ description: Configuration for buffer settings on destination components.
+ oneOf:
+ - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions'
+ - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions'
+ - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions'
+ ObservabilityPipelineBufferOptionsDiskType:
+ default: disk
+ description: The type of the buffer that will be configured, a disk buffer.
+ enum:
+ - disk
+ type: string
+ x-enum-varnames:
+ - DISK
+ ObservabilityPipelineBufferOptionsMemoryType:
+ default: memory
+ description: The type of the buffer that will be configured, a memory buffer.
+ enum:
+ - memory
+ type: string
+ x-enum-varnames:
+ - MEMORY
+ ObservabilityPipelineBufferOptionsWhenFull:
+ default: block
+ description: Behavior when the buffer is full (block and stop accepting new
+ events, or drop new events)
+ enum:
+ - block
+ - drop_newest
+ type: string
+ x-enum-varnames:
+ - BLOCK
+ - DROP_NEWEST
ObservabilityPipelineCloudPremDestination:
description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem.
@@ -38868,6 +38911,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
compression:
$ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression'
encoding:
@@ -39093,6 +39138,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: datadog-logs-destination
@@ -39347,6 +39394,19 @@ components:
type: string
x-enum-varnames:
- DEDUPE
+ ObservabilityPipelineDiskBufferOptions:
+ description: Options for configuring a disk buffer.
+ properties:
+ max_size:
+ description: Maximum size of the disk buffer.
+ example: 4096
+ format: int64
+ type: integer
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType'
+ when_full:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsWhenFull'
+ type: object
ObservabilityPipelineElasticsearchDestination:
description: 'The `elasticsearch` destination writes logs to an Elasticsearch
cluster.
@@ -39356,6 +39416,8 @@ components:
properties:
api_version:
$ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion'
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to in Elasticsearch.
example: logs-index
@@ -39901,6 +39963,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
customer_id:
description: The Google Chronicle customer ID.
example: abcdefg123456789
@@ -39969,6 +40033,8 @@ components:
description: Name of the GCS bucket.
example: error-logs
type: string
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: gcs-destination
@@ -40053,6 +40119,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestinationEncoding'
id:
@@ -40579,6 +40647,28 @@ components:
type: string
x-enum-varnames:
- LOGSTASH
+ ObservabilityPipelineMemoryBufferOptions:
+ description: Options for configuring a memory buffer by byte size.
+ properties:
+ max_size:
+ description: Maximum size of the memory buffer.
+ example: 4096
+ format: int64
+ type: integer
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
+ type: object
+ ObservabilityPipelineMemoryBufferSizeOptions:
+ description: Options for configuring a memory buffer by queue length.
+ properties:
+ max_events:
+ description: Maximum events for the memory buffer.
+ example: 500
+ format: int64
+ type: integer
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
+ type: object
ObservabilityPipelineMetadataEntry:
description: A custom metadata entry.
properties:
@@ -40701,6 +40791,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: new-relic-destination
@@ -40847,6 +40939,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
@@ -41560,6 +41654,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: rsyslog-destination
@@ -42065,6 +42161,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: sentinelone-destination
@@ -42119,6 +42217,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding'
framing:
@@ -42495,6 +42595,8 @@ components:
If `false`, Splunk assigns the time the event was received.'
example: true
type: boolean
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding'
id:
@@ -42619,6 +42721,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding'
header_custom_fields:
@@ -42732,6 +42836,8 @@ components:
**Supported pipeline types:** logs'
properties:
+ buffer:
+ $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: syslog-ng-destination
diff --git a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
index 1687442550d..d46d455c142 100644
--- a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
@@ -26,6 +26,7 @@
*/
@JsonPropertyOrder({
AzureStorageDestination.JSON_PROPERTY_BLOB_PREFIX,
+ AzureStorageDestination.JSON_PROPERTY_BUFFER,
AzureStorageDestination.JSON_PROPERTY_CONTAINER_NAME,
AzureStorageDestination.JSON_PROPERTY_ID,
AzureStorageDestination.JSON_PROPERTY_INPUTS,
@@ -38,6 +39,9 @@ public class AzureStorageDestination {
public static final String JSON_PROPERTY_BLOB_PREFIX = "blob_prefix";
private String blobPrefix;
+ public static final String JSON_PROPERTY_BUFFER = "buffer";
+ private ObservabilityPipelineBufferOptions buffer;
+
public static final String JSON_PROPERTY_CONTAINER_NAME = "container_name";
private String containerName;
@@ -86,6 +90,28 @@ public void setBlobPrefix(String blobPrefix) {
this.blobPrefix = blobPrefix;
}
+ public AzureStorageDestination buffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ this.unparsed |= buffer.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for buffer settings on destination components.
+ *
+ * @return buffer
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_BUFFER)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineBufferOptions getBuffer() {
+ return buffer;
+ }
+
+ public void setBuffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ }
+
public AzureStorageDestination containerName(String containerName) {
this.containerName = containerName;
return this;
@@ -232,6 +258,7 @@ public boolean equals(Object o) {
}
AzureStorageDestination azureStorageDestination = (AzureStorageDestination) o;
return Objects.equals(this.blobPrefix, azureStorageDestination.blobPrefix)
+ && Objects.equals(this.buffer, azureStorageDestination.buffer)
&& Objects.equals(this.containerName, azureStorageDestination.containerName)
&& Objects.equals(this.id, azureStorageDestination.id)
&& Objects.equals(this.inputs, azureStorageDestination.inputs)
@@ -241,7 +268,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(blobPrefix, containerName, id, inputs, type, additionalProperties);
+ return Objects.hash(blobPrefix, buffer, containerName, id, inputs, type, additionalProperties);
}
@Override
@@ -249,6 +276,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class AzureStorageDestination {\n");
sb.append(" blobPrefix: ").append(toIndentedString(blobPrefix)).append("\n");
+ sb.append(" buffer: ").append(toIndentedString(buffer)).append("\n");
sb.append(" containerName: ").append(toIndentedString(containerName)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
index 51571518f7b..500121c0cf6 100644
--- a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
@@ -25,6 +25,7 @@
*
Supported pipeline types: logs
*/
@JsonPropertyOrder({
+ MicrosoftSentinelDestination.JSON_PROPERTY_BUFFER,
MicrosoftSentinelDestination.JSON_PROPERTY_CLIENT_ID,
MicrosoftSentinelDestination.JSON_PROPERTY_DCR_IMMUTABLE_ID,
MicrosoftSentinelDestination.JSON_PROPERTY_ID,
@@ -37,6 +38,9 @@
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class MicrosoftSentinelDestination {
@JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_BUFFER = "buffer";
+ private ObservabilityPipelineBufferOptions buffer;
+
public static final String JSON_PROPERTY_CLIENT_ID = "client_id";
private String clientId;
@@ -81,6 +85,28 @@ public MicrosoftSentinelDestination(
this.unparsed |= !type.isValid();
}
+ public MicrosoftSentinelDestination buffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ this.unparsed |= buffer.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for buffer settings on destination components.
+ *
+ * @return buffer
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_BUFFER)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineBufferOptions getBuffer() {
+ return buffer;
+ }
+
+ public void setBuffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ }
+
public MicrosoftSentinelDestination clientId(String clientId) {
this.clientId = clientId;
return this;
@@ -286,7 +312,8 @@ public boolean equals(Object o) {
return false;
}
MicrosoftSentinelDestination microsoftSentinelDestination = (MicrosoftSentinelDestination) o;
- return Objects.equals(this.clientId, microsoftSentinelDestination.clientId)
+ return Objects.equals(this.buffer, microsoftSentinelDestination.buffer)
+ && Objects.equals(this.clientId, microsoftSentinelDestination.clientId)
&& Objects.equals(this.dcrImmutableId, microsoftSentinelDestination.dcrImmutableId)
&& Objects.equals(this.id, microsoftSentinelDestination.id)
&& Objects.equals(this.inputs, microsoftSentinelDestination.inputs)
@@ -300,13 +327,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- clientId, dcrImmutableId, id, inputs, table, tenantId, type, additionalProperties);
+ buffer, clientId, dcrImmutableId, id, inputs, table, tenantId, type, additionalProperties);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class MicrosoftSentinelDestination {\n");
+ sb.append(" buffer: ").append(toIndentedString(buffer)).append("\n");
sb.append(" clientId: ").append(toIndentedString(clientId)).append("\n");
sb.append(" dcrImmutableId: ").append(toIndentedString(dcrImmutableId)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
index 57c9c89f0ca..ee1ab9d0f08 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
@@ -26,6 +26,7 @@
*/
@JsonPropertyOrder({
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_AUTH,
+ ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_BUFFER,
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_BULK_INDEX,
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_ID,
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_INPUTS,
@@ -38,6 +39,9 @@ public class ObservabilityPipelineAmazonOpenSearchDestination {
public static final String JSON_PROPERTY_AUTH = "auth";
private ObservabilityPipelineAmazonOpenSearchDestinationAuth auth;
+ public static final String JSON_PROPERTY_BUFFER = "buffer";
+ private ObservabilityPipelineBufferOptions buffer;
+
public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index";
private String bulkIndex;
@@ -92,6 +96,29 @@ public void setAuth(ObservabilityPipelineAmazonOpenSearchDestinationAuth auth) {
this.auth = auth;
}
+ public ObservabilityPipelineAmazonOpenSearchDestination buffer(
+ ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ this.unparsed |= buffer.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for buffer settings on destination components.
+ *
+ * @return buffer
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_BUFFER)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineBufferOptions getBuffer() {
+ return buffer;
+ }
+
+ public void setBuffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ }
+
public ObservabilityPipelineAmazonOpenSearchDestination bulkIndex(String bulkIndex) {
this.bulkIndex = bulkIndex;
return this;
@@ -243,6 +270,7 @@ public boolean equals(Object o) {
observabilityPipelineAmazonOpenSearchDestination =
(ObservabilityPipelineAmazonOpenSearchDestination) o;
return Objects.equals(this.auth, observabilityPipelineAmazonOpenSearchDestination.auth)
+ && Objects.equals(this.buffer, observabilityPipelineAmazonOpenSearchDestination.buffer)
&& Objects.equals(
this.bulkIndex, observabilityPipelineAmazonOpenSearchDestination.bulkIndex)
&& Objects.equals(this.id, observabilityPipelineAmazonOpenSearchDestination.id)
@@ -255,7 +283,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(auth, bulkIndex, id, inputs, type, additionalProperties);
+ return Objects.hash(auth, buffer, bulkIndex, id, inputs, type, additionalProperties);
}
@Override
@@ -263,6 +291,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ObservabilityPipelineAmazonOpenSearchDestination {\n");
sb.append(" auth: ").append(toIndentedString(auth)).append("\n");
+ sb.append(" buffer: ").append(toIndentedString(buffer)).append("\n");
sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
index 6538942f2ef..c78d0024dd6 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
@@ -28,6 +28,7 @@
@JsonPropertyOrder({
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_BUCKET,
+ ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_BUFFER,
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_ID,
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_INPUTS,
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_KEY_PREFIX,
@@ -46,6 +47,9 @@ public class ObservabilityPipelineAmazonS3Destination {
public static final String JSON_PROPERTY_BUCKET = "bucket";
private String bucket;
+ public static final String JSON_PROPERTY_BUFFER = "buffer";
+ private ObservabilityPipelineBufferOptions buffer;
+
public static final String JSON_PROPERTY_ID = "id";
private String id;
@@ -133,6 +137,29 @@ public void setBucket(String bucket) {
this.bucket = bucket;
}
+ public ObservabilityPipelineAmazonS3Destination buffer(
+ ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ this.unparsed |= buffer.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for buffer settings on destination components.
+ *
+ * @return buffer
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_BUFFER)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineBufferOptions getBuffer() {
+ return buffer;
+ }
+
+ public void setBuffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ }
+
public ObservabilityPipelineAmazonS3Destination id(String id) {
this.id = id;
return this;
@@ -350,6 +377,7 @@ public boolean equals(Object o) {
(ObservabilityPipelineAmazonS3Destination) o;
return Objects.equals(this.auth, observabilityPipelineAmazonS3Destination.auth)
&& Objects.equals(this.bucket, observabilityPipelineAmazonS3Destination.bucket)
+ && Objects.equals(this.buffer, observabilityPipelineAmazonS3Destination.buffer)
&& Objects.equals(this.id, observabilityPipelineAmazonS3Destination.id)
&& Objects.equals(this.inputs, observabilityPipelineAmazonS3Destination.inputs)
&& Objects.equals(this.keyPrefix, observabilityPipelineAmazonS3Destination.keyPrefix)
@@ -365,7 +393,17 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- auth, bucket, id, inputs, keyPrefix, region, storageClass, tls, type, additionalProperties);
+ auth,
+ bucket,
+ buffer,
+ id,
+ inputs,
+ keyPrefix,
+ region,
+ storageClass,
+ tls,
+ type,
+ additionalProperties);
}
@Override
@@ -374,6 +412,7 @@ public String toString() {
sb.append("class ObservabilityPipelineAmazonS3Destination {\n");
sb.append(" auth: ").append(toIndentedString(auth)).append("\n");
sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n");
+ sb.append(" buffer: ").append(toIndentedString(buffer)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
sb.append(" keyPrefix: ").append(toIndentedString(keyPrefix)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
index 221bc4b2806..98fc932d182 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
@@ -27,6 +27,7 @@
@JsonPropertyOrder({
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_BUCKET,
+ ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_BUFFER,
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_CUSTOM_SOURCE_NAME,
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_ID,
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_INPUTS,
@@ -44,6 +45,9 @@ public class ObservabilityPipelineAmazonSecurityLakeDestination {
public static final String JSON_PROPERTY_BUCKET = "bucket";
private String bucket;
+ public static final String JSON_PROPERTY_BUFFER = "buffer";
+ private ObservabilityPipelineBufferOptions buffer;
+
public static final String JSON_PROPERTY_CUSTOM_SOURCE_NAME = "custom_source_name";
private String customSourceName;
@@ -128,6 +132,29 @@ public void setBucket(String bucket) {
this.bucket = bucket;
}
+ public ObservabilityPipelineAmazonSecurityLakeDestination buffer(
+ ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ this.unparsed |= buffer.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for buffer settings on destination components.
+ *
+ * @return buffer
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_BUFFER)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineBufferOptions getBuffer() {
+ return buffer;
+ }
+
+ public void setBuffer(ObservabilityPipelineBufferOptions buffer) {
+ this.buffer = buffer;
+ }
+
public ObservabilityPipelineAmazonSecurityLakeDestination customSourceName(
String customSourceName) {
this.customSourceName = customSourceName;
@@ -324,6 +351,7 @@ public boolean equals(Object o) {
(ObservabilityPipelineAmazonSecurityLakeDestination) o;
return Objects.equals(this.auth, observabilityPipelineAmazonSecurityLakeDestination.auth)
&& Objects.equals(this.bucket, observabilityPipelineAmazonSecurityLakeDestination.bucket)
+ && Objects.equals(this.buffer, observabilityPipelineAmazonSecurityLakeDestination.buffer)
&& Objects.equals(
this.customSourceName,
observabilityPipelineAmazonSecurityLakeDestination.customSourceName)
@@ -340,7 +368,16 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- auth, bucket, customSourceName, id, inputs, region, tls, type, additionalProperties);
+ auth,
+ bucket,
+ buffer,
+ customSourceName,
+ id,
+ inputs,
+ region,
+ tls,
+ type,
+ additionalProperties);
}
@Override
@@ -349,6 +386,7 @@ public String toString() {
sb.append("class ObservabilityPipelineAmazonSecurityLakeDestination {\n");
sb.append(" auth: ").append(toIndentedString(auth)).append("\n");
sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n");
+ sb.append(" buffer: ").append(toIndentedString(buffer)).append("\n");
sb.append(" customSourceName: ").append(toIndentedString(customSourceName)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineBufferOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineBufferOptions.java
new file mode 100644
index 00000000000..bc7dbbaed68
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineBufferOptions.java
@@ -0,0 +1,390 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.AbstractOpenApiSchema;
+import com.datadog.api.client.JSON;
+import com.datadog.api.client.UnparsedObject;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import jakarta.ws.rs.core.GenericType;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+@JsonDeserialize(
+ using = ObservabilityPipelineBufferOptions.ObservabilityPipelineBufferOptionsDeserializer.class)
+@JsonSerialize(
+ using = ObservabilityPipelineBufferOptions.ObservabilityPipelineBufferOptionsSerializer.class)
+public class ObservabilityPipelineBufferOptions extends AbstractOpenApiSchema {
+ private static final Logger log =
+ Logger.getLogger(ObservabilityPipelineBufferOptions.class.getName());
+
+ @JsonIgnore public boolean unparsed = false;
+
+ public static class ObservabilityPipelineBufferOptionsSerializer
+      extends StdSerializer<ObservabilityPipelineBufferOptions> {
+ public ObservabilityPipelineBufferOptionsSerializer(
+        Class<ObservabilityPipelineBufferOptions> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineBufferOptionsSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineBufferOptions value, JsonGenerator jgen, SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.getActualInstance());
+ }
+ }
+
+ public static class ObservabilityPipelineBufferOptionsDeserializer
+      extends StdDeserializer<ObservabilityPipelineBufferOptions> {
+ public ObservabilityPipelineBufferOptionsDeserializer() {
+ this(ObservabilityPipelineBufferOptions.class);
+ }
+
+    public ObservabilityPipelineBufferOptionsDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public ObservabilityPipelineBufferOptions deserialize(
+ JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+ JsonNode tree = jp.readValueAsTree();
+ Object deserialized = null;
+ Object tmp = null;
+ boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
+ int match = 0;
+ JsonToken token = tree.traverse(jp.getCodec()).nextToken();
+ // deserialize ObservabilityPipelineDiskBufferOptions
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineDiskBufferOptions.class.equals(Integer.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Long.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Float.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Double.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Boolean.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineDiskBufferOptions.class.equals(Integer.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineDiskBufferOptions.class.equals(Float.class)
+ || ObservabilityPipelineDiskBufferOptions.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineDiskBufferOptions.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineDiskBufferOptions.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineDiskBufferOptions.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineDiskBufferOptions) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineDiskBufferOptions'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineDiskBufferOptions'",
+ e);
+ }
+
+ // deserialize ObservabilityPipelineMemoryBufferOptions
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineMemoryBufferOptions.class.equals(Integer.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Long.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Float.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Double.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Boolean.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineMemoryBufferOptions.class.equals(Integer.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineMemoryBufferOptions.class.equals(Float.class)
+ || ObservabilityPipelineMemoryBufferOptions.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineMemoryBufferOptions.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineMemoryBufferOptions.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineMemoryBufferOptions.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineMemoryBufferOptions) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineMemoryBufferOptions'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineMemoryBufferOptions'",
+ e);
+ }
+
+ // deserialize ObservabilityPipelineMemoryBufferSizeOptions
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Integer.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Long.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Float.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Double.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Boolean.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Integer.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Float.class)
+ || ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineMemoryBufferSizeOptions.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineMemoryBufferSizeOptions.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineMemoryBufferSizeOptions.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineMemoryBufferSizeOptions) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineMemoryBufferSizeOptions'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineMemoryBufferSizeOptions'",
+ e);
+ }
+
+ ObservabilityPipelineBufferOptions ret = new ObservabilityPipelineBufferOptions();
+ if (match == 1) {
+ ret.setActualInstance(deserialized);
+ } else {
+        Map<String, Object> res =
+ new ObjectMapper()
+ .readValue(
+ tree.traverse(jp.getCodec()).readValueAsTree().toString(),
+ new TypeReference