diff --git a/.apigentools-info b/.apigentools-info
index 090f105a72f..868ddf94a20 100644
--- a/.apigentools-info
+++ b/.apigentools-info
@@ -4,13 +4,13 @@
     "spec_versions": {
         "v1": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2025-01-10 22:21:35.663663",
-            "spec_repo_commit": "2f8c42a8"
+            "regenerated": "2025-01-13 17:10:06.419911",
+            "spec_repo_commit": "be17a7b8"
         },
         "v2": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2025-01-10 22:21:35.678588",
-            "spec_repo_commit": "2f8c42a8"
+            "regenerated": "2025-01-13 17:10:06.435212",
+            "spec_repo_commit": "be17a7b8"
         }
     }
 }
\ No newline at end of file
diff --git a/.generator/schemas/v1/openapi.yaml b/.generator/schemas/v1/openapi.yaml
index d5ac7b28aae..09a4d845e5f 100644
--- a/.generator/schemas/v1/openapi.yaml
+++ b/.generator/schemas/v1/openapi.yaml
@@ -5926,9 +5926,6 @@ components:
 
         Make sure to use an application key created by an admin.'
       properties:
-        description:
-          description: A description of the pipeline.
-          type: string
         filter:
          $ref: '#/components/schemas/LogsFilter'
        id:
@@ -5951,12 +5948,6 @@ components:
         items:
           $ref: '#/components/schemas/LogsProcessor'
         type: array
-        tags:
-          description: A list of tags associated with the pipeline.
-          items:
-            description: A single tag using the format `key:value`.
-            type: string
-          type: array
         type:
           description: Type of pipeline.
           example: pipeline
diff --git a/src/main/java/com/datadog/api/client/v1/model/LogsPipeline.java b/src/main/java/com/datadog/api/client/v1/model/LogsPipeline.java
index 8ca31ccfe75..0a7e7a6d4a5 100644
--- a/src/main/java/com/datadog/api/client/v1/model/LogsPipeline.java
+++ b/src/main/java/com/datadog/api/client/v1/model/LogsPipeline.java
@@ -27,23 +27,18 @@
  * application key created by an admin.
  */
 @JsonPropertyOrder({
-  LogsPipeline.JSON_PROPERTY_DESCRIPTION,
   LogsPipeline.JSON_PROPERTY_FILTER,
   LogsPipeline.JSON_PROPERTY_ID,
   LogsPipeline.JSON_PROPERTY_IS_ENABLED,
   LogsPipeline.JSON_PROPERTY_IS_READ_ONLY,
   LogsPipeline.JSON_PROPERTY_NAME,
   LogsPipeline.JSON_PROPERTY_PROCESSORS,
-  LogsPipeline.JSON_PROPERTY_TAGS,
   LogsPipeline.JSON_PROPERTY_TYPE
 })
 @jakarta.annotation.Generated(
     value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
 public class LogsPipeline {
   @JsonIgnore public boolean unparsed = false;
-  public static final String JSON_PROPERTY_DESCRIPTION = "description";
-  private String description;
-
   public static final String JSON_PROPERTY_FILTER = "filter";
   private LogsFilter filter;
 
@@ -62,9 +57,6 @@ public class LogsPipeline {
   public static final String JSON_PROPERTY_PROCESSORS = "processors";
   private List<LogsProcessor> processors = null;
 
-  public static final String JSON_PROPERTY_TAGS = "tags";
-  private List<String> tags = null;
-
   public static final String JSON_PROPERTY_TYPE = "type";
   private String type;
 
@@ -75,27 +67,6 @@ public LogsPipeline(@JsonProperty(required = true, value = JSON_PROPERTY_NAME) S
     this.name = name;
   }
 
-  public LogsPipeline description(String description) {
-    this.description = description;
-    return this;
-  }
-
-  /**
-   * A description of the pipeline.
-   *
-   * @return description
-   */
-  @jakarta.annotation.Nullable
-  @JsonProperty(JSON_PROPERTY_DESCRIPTION)
-  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public String getDescription() {
-    return description;
-  }
-
-  public void setDescription(String description) {
-    this.description = description;
-  }
-
   public LogsPipeline filter(LogsFilter filter) {
     this.filter = filter;
     this.unparsed |= filter.unparsed;
@@ -216,35 +187,6 @@ public void setProcessors(List<LogsProcessor> processors) {
     this.processors = processors;
   }
 
-  public LogsPipeline tags(List<String> tags) {
-    this.tags = tags;
-    return this;
-  }
-
-  public LogsPipeline addTagsItem(String tagsItem) {
-    if (this.tags == null) {
-      this.tags = new ArrayList<>();
-    }
-    this.tags.add(tagsItem);
-    return this;
-  }
-
-  /**
-   * A list of tags associated with the pipeline.
-   *
-   * @return tags
-   */
-  @jakarta.annotation.Nullable
-  @JsonProperty(JSON_PROPERTY_TAGS)
-  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public List<String> getTags() {
-    return tags;
-  }
-
-  public void setTags(List<String> tags) {
-    this.tags = tags;
-  }
-
   /**
    * Type of pipeline.
    *
@@ -313,14 +255,12 @@ public boolean equals(Object o) {
       return false;
     }
     LogsPipeline logsPipeline = (LogsPipeline) o;
-    return Objects.equals(this.description, logsPipeline.description)
-        && Objects.equals(this.filter, logsPipeline.filter)
+    return Objects.equals(this.filter, logsPipeline.filter)
         && Objects.equals(this.id, logsPipeline.id)
         && Objects.equals(this.isEnabled, logsPipeline.isEnabled)
         && Objects.equals(this.isReadOnly, logsPipeline.isReadOnly)
         && Objects.equals(this.name, logsPipeline.name)
         && Objects.equals(this.processors, logsPipeline.processors)
-        && Objects.equals(this.tags, logsPipeline.tags)
         && Objects.equals(this.type, logsPipeline.type)
         && Objects.equals(this.additionalProperties, logsPipeline.additionalProperties);
   }
@@ -328,30 +268,19 @@ public boolean equals(Object o) {
   @Override
   public int hashCode() {
     return Objects.hash(
-        description,
-        filter,
-        id,
-        isEnabled,
-        isReadOnly,
-        name,
-        processors,
-        tags,
-        type,
-        additionalProperties);
+        filter, id, isEnabled, isReadOnly, name, processors, type, additionalProperties);
   }
 
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("class LogsPipeline {\n");
-    sb.append("    description: ").append(toIndentedString(description)).append("\n");
     sb.append("    filter: ").append(toIndentedString(filter)).append("\n");
     sb.append("    id: ").append(toIndentedString(id)).append("\n");
     sb.append("    isEnabled: ").append(toIndentedString(isEnabled)).append("\n");
     sb.append("    isReadOnly: ").append(toIndentedString(isReadOnly)).append("\n");
     sb.append("    name: ").append(toIndentedString(name)).append("\n");
     sb.append("    processors: ").append(toIndentedString(processors)).append("\n");
-    sb.append("    tags: ").append(toIndentedString(tags)).append("\n");
     sb.append("    type: ").append(toIndentedString(type)).append("\n");
     sb.append("    additionalProperties: ")
         .append(toIndentedString(additionalProperties))
diff --git a/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature b/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature
index e811aea3af0..f7a68917032 100644
--- a/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature
+++ b/src/test/resources/com/datadog/api/client/v1/api/logs_pipelines.feature
@@ -26,14 +26,14 @@ Feature: Logs Pipelines
   @generated @skip @team:DataDog/event-platform-experience
   Scenario: Create a pipeline returns "Bad Request" response
     Given new "CreateLogsPipeline" request
-    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
+    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
     When the request is sent
     Then the response status is 400 Bad Request
 
   @generated @skip @team:DataDog/event-platform-experience
   Scenario: Create a pipeline returns "OK" response
     Given new "CreateLogsPipeline" request
-    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
+    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
     When the request is sent
     Then the response status is 200 OK
 
@@ -81,7 +81,7 @@ Feature: Logs Pipelines
   Scenario: Update a pipeline returns "Bad Request" response
     Given new "UpdateLogsPipeline" request
     And request contains "pipeline_id" parameter from "REPLACE.ME"
-    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
+    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
     When the request is sent
     Then the response status is 400 Bad Request
 
@@ -89,7 +89,7 @@ Feature: Logs Pipelines
   Scenario: Update a pipeline returns "OK" response
     Given new "UpdateLogsPipeline" request
     And request contains "pipeline_id" parameter from "REPLACE.ME"
-    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}], "tags": []}
+    And body with value {"filter": {"query": "source:python"}, "name": "", "processors": [{"grok": {"match_rules": "rule_name_1 foo\nrule_name_2 bar\n", "support_rules": "rule_name_1 foo\nrule_name_2 bar\n"}, "is_enabled": false, "samples": [], "source": "message", "type": "grok-parser"}]}
     When the request is sent
     Then the response status is 200 OK