diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml
index c1ace36c385..54628dbe576 100644
--- a/.generator/schemas/v2/openapi.yaml
+++ b/.generator/schemas/v2/openapi.yaml
@@ -43045,12 +43045,14 @@ components:
ObservabilityPipelineAmazonS3Source:
description: |-
The `amazon_s3` source ingests logs from an Amazon S3 bucket.
- It supports AWS authentication and TLS encryption.
+ It supports AWS authentication, TLS encryption, and configurable compression.
**Supported pipeline types:** logs
properties:
auth:
$ref: "#/components/schemas/ObservabilityPipelineAwsAuth"
+ compression:
+ $ref: "#/components/schemas/ObservabilityPipelineAmazonS3SourceCompression"
id:
description: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components).
example: aws-s3-source
@@ -43073,6 +43075,20 @@ components:
- region
type: object
x-pipeline-types: [logs]
+ ObservabilityPipelineAmazonS3SourceCompression:
+ description: Compression format for objects retrieved from the S3 bucket. Use `auto` to detect compression from the object's Content-Encoding header or file extension.
+ enum:
+ - auto
+ - none
+ - gzip
+ - zstd
+ example: gzip
+ type: string
+ x-enum-varnames:
+ - AUTO
+ - NONE
+ - GZIP
+ - ZSTD
ObservabilityPipelineAmazonS3SourceType:
default: amazon_s3
description: The source type. Always `amazon_s3`.
diff --git a/examples/v2/observability-pipelines/ValidatePipeline_99164570.java b/examples/v2/observability-pipelines/ValidatePipeline_99164570.java
new file mode 100644
index 00000000000..26527ce3fc9
--- /dev/null
+++ b/examples/v2/observability-pipelines/ValidatePipeline_99164570.java
@@ -0,0 +1,92 @@
+// Validate an observability pipeline with amazon S3 source compression returns "OK" response
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
+import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3Source;
+import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3SourceCompression;
+import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3SourceType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorGroup;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineSpec;
+import com.datadog.api.client.v2.model.ObservabilityPipelineSpecData;
+import com.datadog.api.client.v2.model.ValidationResponse;
+import java.util.Collections;
+
+public class Example {
+ public static void main(String[] args) {
+ ApiClient defaultClient = ApiClient.getDefaultApiClient(); // NOTE(review): presumably reads API/app keys from the environment — confirm against client docs
+ ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);
+
+ ObservabilityPipelineSpec body = // request payload: one S3 source -> one processor group -> one Datadog Logs destination
+ new ObservabilityPipelineSpec()
+ .data(
+ new ObservabilityPipelineSpecData()
+ .attributes(
+ new ObservabilityPipelineDataAttributes()
+ .config(
+ new ObservabilityPipelineConfig()
+ .destinations(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigDestinationItem(
+ new ObservabilityPipelineDatadogLogsDestination()
+ .id("datadog-logs-destination")
+ .inputs(
+ Collections.singletonList(
+ "my-processor-group")) // destination consumes the processor group's output
+ .type(
+ ObservabilityPipelineDatadogLogsDestinationType
+ .DATADOG_LOGS))))
+ .processorGroups(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigProcessorGroup()
+ .enabled(true)
+ .id("my-processor-group")
+ .include("service:my-service")
+ .inputs(
+ Collections.singletonList("amazon-s3-source")) // group consumes the S3 source's output
+ .processors(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigProcessorItem(
+ new ObservabilityPipelineFilterProcessor()
+ .enabled(true)
+ .id("filter-processor")
+ .include("service:my-service")
+ .type(
+ ObservabilityPipelineFilterProcessorType
+ .FILTER))))))
+ .sources(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigSourceItem(
+ new ObservabilityPipelineAmazonS3Source()
+ .id("amazon-s3-source")
+ .type(
+ ObservabilityPipelineAmazonS3SourceType
+ .AMAZON_S3)
+ .region("us-east-1")
+ .compression( // the new field this example demonstrates: decode gzip-compressed S3 objects
+ ObservabilityPipelineAmazonS3SourceCompression
+ .GZIP)))))
+ .name("Pipeline with S3 Source Compression"))
+ .type("pipelines"));
+
+ try {
+ ValidationResponse result = apiInstance.validatePipeline(body); // server-side validation only; nothing is created
+ System.out.println(result);
+ } catch (ApiException e) { // non-2xx responses surface here with full HTTP details
+ System.err.println("Exception when calling ObservabilityPipelinesApi#validatePipeline");
+ System.err.println("Status code: " + e.getCode());
+ System.err.println("Reason: " + e.getResponseBody());
+ System.err.println("Response headers: " + e.getResponseHeaders());
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
index 5c5339ec0e4..deea70aca50 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
@@ -19,12 +19,13 @@
/**
* The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS
- * authentication and TLS encryption.
+ * authentication, TLS encryption, and configurable compression.
*
*
Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH,
+ ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_COMPRESSION,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_ID,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_REGION,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TLS,
@@ -38,6 +39,9 @@ public class ObservabilityPipelineAmazonS3Source {
public static final String JSON_PROPERTY_AUTH = "auth";
private ObservabilityPipelineAwsAuth auth;
+ public static final String JSON_PROPERTY_COMPRESSION = "compression"; // wire name of the new optional field
+ private ObservabilityPipelineAmazonS3SourceCompression compression; // null when omitted from the payload
+
public static final String JSON_PROPERTY_ID = "id";
private String id;
@@ -91,6 +95,33 @@ public void setAuth(ObservabilityPipelineAwsAuth auth) {
this.auth = auth;
}
+ public ObservabilityPipelineAmazonS3Source compression( // fluent builder for the optional compression format
+ ObservabilityPipelineAmazonS3SourceCompression compression) {
+ this.compression = compression;
+ this.unparsed |= !compression.isValid(); // flags unknown enum values; NOTE(review): throws NPE if null is passed despite the @Nullable getter — confirm generator intent
+ return this;
+ }
+
+ /**
+ * Compression format for objects retrieved from the S3 bucket. Use auto to detect
+ * compression from the object's Content-Encoding header or file extension.
+ *
+ * @return compression
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_COMPRESSION)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineAmazonS3SourceCompression getCompression() {
+ return compression; // may be null — field is optional in the schema
+ }
+
+ public void setCompression(ObservabilityPipelineAmazonS3SourceCompression compression) { // Jackson/bean setter counterpart of the fluent builder
+ if (!compression.isValid()) { // NOTE(review): NPEs on null input even though the getter is @Nullable — confirm generator intent
+ this.unparsed = true; // unknown enum value: keep the object but mark it unparsed
+ }
+ this.compression = compression;
+ }
+
public ObservabilityPipelineAmazonS3Source id(String id) {
this.id = id;
return this;
@@ -257,6 +288,7 @@ public boolean equals(Object o) {
ObservabilityPipelineAmazonS3Source observabilityPipelineAmazonS3Source =
(ObservabilityPipelineAmazonS3Source) o;
return Objects.equals(this.auth, observabilityPipelineAmazonS3Source.auth)
+ && Objects.equals(this.compression, observabilityPipelineAmazonS3Source.compression)
&& Objects.equals(this.id, observabilityPipelineAmazonS3Source.id)
&& Objects.equals(this.region, observabilityPipelineAmazonS3Source.region)
&& Objects.equals(this.tls, observabilityPipelineAmazonS3Source.tls)
@@ -268,7 +300,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(auth, id, region, tls, type, urlKey, additionalProperties);
+ return Objects.hash(auth, compression, id, region, tls, type, urlKey, additionalProperties);
}
@Override
@@ -276,6 +308,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ObservabilityPipelineAmazonS3Source {\n");
sb.append(" auth: ").append(toIndentedString(auth)).append("\n");
+ sb.append(" compression: ").append(toIndentedString(compression)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" region: ").append(toIndentedString(region)).append("\n");
sb.append(" tls: ").append(toIndentedString(tls)).append("\n");
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceCompression.java
new file mode 100644
index 00000000000..c29ac0568bb
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceCompression.java
@@ -0,0 +1,72 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Compression format for objects retrieved from the S3 bucket. Use auto to detect
+ * compression from the object's Content-Encoding header or file extension.
+ */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineAmazonS3SourceCompression
+ .ObservabilityPipelineAmazonS3SourceCompressionSerializer.class)
+public class ObservabilityPipelineAmazonS3SourceCompression extends ModelEnum