Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions .generator/schemas/v1/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6683,6 +6683,9 @@ components:
A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline
can only contain Processors.'
properties:
description:
description: A description of the pipeline.
type: string
filter:
$ref: '#/components/schemas/LogsFilter'
is_enabled:
Expand All @@ -6697,6 +6700,12 @@ components:
items:
$ref: '#/components/schemas/LogsProcessor'
type: array
tags:
description: A list of tags associated with the pipeline.
items:
description: A single tag using the format `key:value`.
type: string
type: array
type:
$ref: '#/components/schemas/LogsPipelineProcessorType'
required:
Expand Down
47 changes: 47 additions & 0 deletions examples/v1/logs-pipelines/CreateLogsPipeline_2599033345.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
// Create a pipeline with nested pipeline processor returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v1.api.LogsPipelinesApi;
import com.datadog.api.client.v1.model.LogsFilter;
import com.datadog.api.client.v1.model.LogsPipeline;
import com.datadog.api.client.v1.model.LogsPipelineProcessor;
import com.datadog.api.client.v1.model.LogsPipelineProcessorType;
import com.datadog.api.client.v1.model.LogsProcessor;
import java.util.Arrays;
import java.util.Collections;

public class Example {
  /**
   * Creates a logs pipeline whose processor list holds a single nested pipeline
   * processor carrying its own tags and description, then prints the API result.
   */
  public static void main(String[] args) {
    ApiClient client = ApiClient.getDefaultApiClient();
    LogsPipelinesApi pipelinesApi = new LogsPipelinesApi(client);

    // Build the nested (sub-)pipeline processor first so the request body stays readable.
    LogsPipelineProcessor nestedPipeline =
        new LogsPipelineProcessor()
            .type(LogsPipelineProcessorType.PIPELINE)
            .isEnabled(true)
            .name("nested_pipeline_with_metadata")
            .filter(new LogsFilter().query("env:production"))
            .tags(Arrays.asList("env:prod", "type:nested"))
            .description("This is a nested pipeline for production logs");

    LogsPipeline body =
        new LogsPipeline()
            .filter(new LogsFilter().query("source:python"))
            .name("testPipelineWithNested")
            .processors(Collections.singletonList(new LogsProcessor(nestedPipeline)))
            .tags(Collections.singletonList("team:test"))
            .description("Pipeline containing nested processor with tags and description");

    try {
      LogsPipeline result = pipelinesApi.createLogsPipeline(body);
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling LogsPipelinesApi#createLogsPipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,21 @@
* contain Processors.
*/
@JsonPropertyOrder({
LogsPipelineProcessor.JSON_PROPERTY_DESCRIPTION,
LogsPipelineProcessor.JSON_PROPERTY_FILTER,
LogsPipelineProcessor.JSON_PROPERTY_IS_ENABLED,
LogsPipelineProcessor.JSON_PROPERTY_NAME,
LogsPipelineProcessor.JSON_PROPERTY_PROCESSORS,
LogsPipelineProcessor.JSON_PROPERTY_TAGS,
LogsPipelineProcessor.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class LogsPipelineProcessor {
@JsonIgnore public boolean unparsed = false;
public static final String JSON_PROPERTY_DESCRIPTION = "description";
private String description;

public static final String JSON_PROPERTY_FILTER = "filter";
private LogsFilter filter;

Expand All @@ -50,6 +55,9 @@ public class LogsPipelineProcessor {
public static final String JSON_PROPERTY_PROCESSORS = "processors";
private List<LogsProcessor> processors = null;

public static final String JSON_PROPERTY_TAGS = "tags";
private List<String> tags = null;

public static final String JSON_PROPERTY_TYPE = "type";
private LogsPipelineProcessorType type = LogsPipelineProcessorType.PIPELINE;

Expand All @@ -62,6 +70,27 @@ public LogsPipelineProcessor(
this.unparsed |= !type.isValid();
}

/**
 * Sets the pipeline description and returns {@code this} for call chaining.
 *
 * @param description a description of the pipeline
 * @return this processor, for fluent call chains
 */
public LogsPipelineProcessor description(String description) {
  setDescription(description);
  return this;
}

/**
 * A description of the pipeline.
 *
 * <p>May be {@code null} when no description has been set; the field is then omitted from the
 * serialized JSON (Jackson {@code USE_DEFAULTS} inclusion).
 *
 * @return description, or {@code null} if unset
 */
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_DESCRIPTION)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public String getDescription() {
return description;
}

/**
 * Sets the description of the pipeline.
 *
 * @param description a description of the pipeline; may be {@code null} to clear it
 */
public void setDescription(String description) {
this.description = description;
}

public LogsPipelineProcessor filter(LogsFilter filter) {
this.filter = filter;
this.unparsed |= filter.unparsed;
Expand Down Expand Up @@ -159,6 +188,35 @@ public void setProcessors(List<LogsProcessor> processors) {
this.processors = processors;
}

/**
 * Replaces the full tag list on this processor and returns {@code this} for call chaining.
 *
 * @param tags tags in {@code key:value} form
 * @return this processor, for fluent call chains
 */
public LogsPipelineProcessor tags(List<String> tags) {
  setTags(tags);
  return this;
}

/**
 * Appends a single tag, lazily creating the backing list on first use.
 *
 * @param tagsItem the tag to append, using the format {@code key:value}
 * @return this processor, for fluent call chains
 */
public LogsPipelineProcessor addTagsItem(String tagsItem) {
  List<String> current = this.tags;
  if (current == null) {
    current = new ArrayList<>();
    this.tags = current;
  }
  current.add(tagsItem);
  return this;
}

/**
 * A list of tags associated with the pipeline.
 *
 * <p>May be {@code null} when no tags have been set; the field is then omitted from the
 * serialized JSON (Jackson {@code USE_DEFAULTS} inclusion).
 *
 * @return tags, or {@code null} if unset
 */
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_TAGS)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public List<String> getTags() {
return tags;
}

/**
 * Sets the list of tags associated with the pipeline.
 *
 * @param tags tags in {@code key:value} form; may be {@code null} to clear them
 */
public void setTags(List<String> tags) {
this.tags = tags;
}

public LogsPipelineProcessor type(LogsPipelineProcessorType type) {
this.type = type;
this.unparsed |= !type.isValid();
Expand Down Expand Up @@ -239,27 +297,32 @@ public boolean equals(Object o) {
return false;
}
LogsPipelineProcessor logsPipelineProcessor = (LogsPipelineProcessor) o;
return Objects.equals(this.filter, logsPipelineProcessor.filter)
return Objects.equals(this.description, logsPipelineProcessor.description)
&& Objects.equals(this.filter, logsPipelineProcessor.filter)
&& Objects.equals(this.isEnabled, logsPipelineProcessor.isEnabled)
&& Objects.equals(this.name, logsPipelineProcessor.name)
&& Objects.equals(this.processors, logsPipelineProcessor.processors)
&& Objects.equals(this.tags, logsPipelineProcessor.tags)
&& Objects.equals(this.type, logsPipelineProcessor.type)
&& Objects.equals(this.additionalProperties, logsPipelineProcessor.additionalProperties);
}

// Hash over every JSON-mapped field plus additionalProperties, mirroring equals().
@Override
public int hashCode() {
return Objects.hash(
description, filter, isEnabled, name, processors, tags, type, additionalProperties);
}

@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class LogsPipelineProcessor {\n");
sb.append(" description: ").append(toIndentedString(description)).append("\n");
sb.append(" filter: ").append(toIndentedString(filter)).append("\n");
sb.append(" isEnabled: ").append(toIndentedString(isEnabled)).append("\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" processors: ").append(toIndentedString(processors)).append("\n");
sb.append(" tags: ").append(toIndentedString(tags)).append("\n");
sb.append(" type: ").append(toIndentedString(type)).append("\n");
sb.append(" additionalProperties: ")
.append(toIndentedString(additionalProperties))
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2026-03-18T17:10:40.108Z
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
[
{
"httpRequest": {
"body": {
"type": "JSON",
"json": "{\"description\":\"Pipeline containing nested processor with tags and description\",\"filter\":{\"query\":\"source:python\"},\"name\":\"testPipelineWithNested\",\"processors\":[{\"description\":\"This is a nested pipeline for production logs\",\"filter\":{\"query\":\"env:production\"},\"is_enabled\":true,\"name\":\"nested_pipeline_with_metadata\",\"tags\":[\"env:prod\",\"type:nested\"],\"type\":\"pipeline\"}],\"tags\":[\"team:test\"]}"
},
"headers": {},
"method": "POST",
"path": "/api/v1/logs/config/pipelines",
"keepAlive": false,
"secure": true
},
"httpResponse": {
"body": "{\"id\":\"GyYNpCrVQtOB3KhqJSpOOA\",\"type\":\"pipeline\",\"name\":\"testPipelineWithNested\",\"is_enabled\":false,\"is_read_only\":false,\"filter\":{\"query\":\"source:python\"},\"processors\":[{\"type\":\"pipeline\",\"name\":\"nested_pipeline_with_metadata\",\"is_enabled\":true,\"filter\":{\"query\":\"env:production\"},\"processors\":[],\"tags\":[\"env:prod\",\"type:nested\"],\"description\":\"This is a nested pipeline for production logs\"}],\"tags\":[\"team:test\"],\"description\":\"Pipeline containing nested processor with tags and description\"}\n",
"headers": {
"Content-Type": [
"application/json"
]
},
"statusCode": 200,
"reasonPhrase": "OK"
},
"times": {
"remainingTimes": 1
},
"timeToLive": {
"unlimited": true
},
"id": "43e467db-3052-c3f2-f66c-542643a0e3a7"
},
{
"httpRequest": {
"headers": {},
"method": "DELETE",
"path": "/api/v1/logs/config/pipelines/GyYNpCrVQtOB3KhqJSpOOA",
"keepAlive": false,
"secure": true
},
"httpResponse": {
"body": "{}\n",
"headers": {
"Content-Type": [
"application/json"
]
},
"statusCode": 200,
"reasonPhrase": "OK"
},
"times": {
"remainingTimes": 1
},
"timeToLive": {
"unlimited": true
},
"id": "9d46b7bd-ff78-f8c6-c944-e3cd37cf4ca4"
}
]
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,13 @@ Feature: Logs Pipelines
When the request is sent
Then the response status is 200 OK

@team:DataDog/event-platform-experience
Scenario: Create a pipeline with nested pipeline processor returns "OK" response
Given new "CreateLogsPipeline" request
And body with value {"filter": {"query": "source:python"}, "name": "testPipelineWithNested", "processors": [{"type": "pipeline", "is_enabled": true, "name": "nested_pipeline_with_metadata", "filter": {"query": "env:production"}, "tags": ["env:prod", "type:nested"], "description": "This is a nested pipeline for production logs"}], "tags": ["team:test"], "description": "Pipeline containing nested processor with tags and description"}
When the request is sent
Then the response status is 200 OK

@team:DataDog/event-platform-experience
Scenario: Create a pipeline with schema processor
Given new "CreateLogsPipeline" request
Expand Down
Loading