From 1d6b8540b5d80c28b1eea68850a8fbb9321ccf6d Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Fri, 2 Jan 2026 14:39:52 +0000 Subject: [PATCH] Regenerate client from commit 0db975e of spec repo --- .generator/schemas/v2/openapi.yaml | 720 ++++++++++++++---- docs/datadog_api_client.v2.model.rst | 143 +++- .../observability-pipelines/CreatePipeline.py | 2 +- .../observability-pipelines/UpdatePipeline.py | 2 +- .../ValidatePipeline.py | 2 +- src/datadog_api_client/configuration.py | 12 +- .../v2/api/observability_pipelines_api.py | 6 +- ...ability_pipeline_add_hostname_processor.py | 79 ++ ...ty_pipeline_add_hostname_processor_type.py | 37 + ...ability_pipeline_cloud_prem_destination.py | 56 ++ ...ty_pipeline_cloud_prem_destination_type.py | 35 + .../v2/model/observability_pipeline_config.py | 26 +- ...bility_pipeline_config_destination_item.py | 35 + ...ability_pipeline_config_processor_group.py | 78 +- ...vability_pipeline_config_processor_item.py | 218 +++--- ...servability_pipeline_config_source_item.py | 14 +- ...lity_pipeline_elasticsearch_destination.py | 14 + ...e_elasticsearch_destination_data_stream.py | 56 ++ ...ity_pipeline_enrichment_table_processor.py | 16 +- ...peline_enrichment_table_reference_table.py | 49 ++ ...bservability_pipeline_kafka_destination.py | 196 +++++ ..._pipeline_kafka_destination_compression.py | 47 ++ ...ity_pipeline_kafka_destination_encoding.py | 38 + ...ability_pipeline_kafka_destination_type.py | 35 + ...ility_pipeline_kafka_librdkafka_option.py} | 4 +- ...y => observability_pipeline_kafka_sasl.py} | 18 +- ...vability_pipeline_kafka_sasl_mechanism.py} | 20 +- .../observability_pipeline_kafka_source.py | 28 +- ...rvability_pipeline_opentelemetry_source.py | 83 ++ ...lity_pipeline_opentelemetry_source_type.py | 37 + ...ervability_pipeline_parse_xml_processor.py | 148 ++++ ...ility_pipeline_parse_xml_processor_type.py | 35 + .../observability_pipeline_quota_processor.py | 16 +- ...ipeline_quota_processor_overflow_action.py | 2 +- ...observability_pipeline_sample_processor.py | 29 +- ...canner_processor_custom_pattern_options.py | 12 +- ...anner_processor_library_pattern_options.py | 15 +- ...vability_pipeline_split_array_processor.py | 99 +++ ...line_split_array_processor_array_config.py | 39 + ...ity_pipeline_split_array_processor_type.py | 35 + src/datadog_api_client/v2/models/__init__.py | 84 +- tests/v2/features/given.json | 24 +- .../features/observability_pipelines.feature | 18 +- tests/v2/features/undo.json | 50 +- 44 files changed, 2288 insertions(+), 424 deletions(-) create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py create mode 100644 
src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py rename src/datadog_api_client/v2/model/{observability_pipeline_kafka_source_librdkafka_option.py => observability_pipeline_kafka_librdkafka_option.py} (84%) rename src/datadog_api_client/v2/model/{observability_pipeline_kafka_source_sasl.py => observability_pipeline_kafka_sasl.py} (59%) rename src/datadog_api_client/v2/model/{observability_pipeline_pipeline_kafka_source_sasl_mechanism.py => observability_pipeline_kafka_sasl_mechanism.py} (50%) create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b7484..dbe6ebb4cd 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35178,6 +35178,43 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: The `add_hostname` processor adds the hostname to log events. + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. properties: @@ -35455,6 +35492,37 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: The `cloud_prem` destination sends logs to Datadog CloudPrem. + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. 
The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35473,7 +35541,7 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array - processors: + processor_groups: description: A list of processor groups that transform or enrich log data. example: - enabled: true @@ -35510,6 +35578,7 @@ components: description: A destination for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' @@ -35528,6 +35597,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35600,24 +35670,27 @@ components: ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: 
'#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: @@ -35637,6 +35710,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. @@ -36030,6 +36104,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36063,6 +36139,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36203,7 +36296,8 @@ components: type: object ObservabilityPipelineEnrichmentTableProcessor: description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36224,6 +36318,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36246,6 +36342,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. 
properties: @@ -36826,6 +36944,151 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: The `kafka` destination sends logs to Apache Kafka topics. + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. 
+ properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -36843,10 +37106,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37129,6 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37116,6 +37355,45 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. 
+ enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: description: The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. @@ -37279,17 +37557,72 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: description: The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor @@ -37298,9 +37631,10 @@ components: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' 
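+          # Illustrative annotation, not generated spec content: the two
+          # settings are mutually exclusive.
+          #
+          #   drop_events: true        # valid on its own
+          #   overflow_action: drop    # valid on its own
+          #   # combining both in one processor is rejected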
example: false type: boolean enabled: @@ -37344,6 +37678,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37381,7 +37717,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37711,6 +38048,16 @@ components: description: Whether this processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37727,18 +38074,13 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object ObservabilityPipelineSampleProcessorType: @@ -37907,6 +38249,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -37962,6 +38309,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38442,6 +38794,68 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: The `split_array` processor splits array fields into separate events + based on configured rules. + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. 
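+          # Illustrative annotation with assumed split semantics: given an
+          # array config of `field: tags`, the event
+          #   {"tags": ["a", "b"], "msg": "hi"}
+          # is split into two events:
+          #   {"tags": "a", "msg": "hi"} and {"tags": "b", "msg": "hi"}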
+ example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: description: The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). @@ -75879,6 +76293,103 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' 
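+      # Preview gating: this client ships with `v2.validate_pipeline` disabled
+      # in `unstable_operations`; see the Python sketch at the end of this
+      # patch for enabling and calling it.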
+ operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79313,103 +79824,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. - operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' 
- operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: description: Delete a pipeline. diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 2e859a4f0f..b40eff45fd 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -15369,6 +15369,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source module -------------------------------------------------------------------------------------------- @@ -15467,6 +15481,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -15663,6 +15691,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destinatio :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_data\_stream module +------------------------------------------------------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_type module ---------------------------------------------------------------------------------------------- @@ -15740,6 +15775,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_proces :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_reference\_table module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_field\_value module -------------------------------------------------------------------------- @@ -15978,24 +16020,59 @@ datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source\_typ :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module ---------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination module +-------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_librdkafka\_option module ------------------------------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_compression module +--------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_sasl module ---------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_encoding module +------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_type module +-------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_librdkafka\_option module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl module +------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl\_mechanism module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source :members: :show-inheritance: @@ -16104,6 +16181,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_open\_search\_destination :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor module ------------------------------------------------------------------------------------ @@ -16153,10 +16244,17 @@ datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_t :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module ------------------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor\_type module +----------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type :members: :show-inheritance: @@ -16720,6 +16818,27 @@ datadog\_api\_client.v2.model.observability\_pipeline\_spec\_data module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_array\_config module +---------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination module -------------------------------------------------------------------------------------- diff --git a/examples/v2/observability-pipelines/CreatePipeline.py b/examples/v2/observability-pipelines/CreatePipeline.py index 87b50b26c0..54e6232db7 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.py +++ b/examples/v2/observability-pipelines/CreatePipeline.py @@ -41,7 +41,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/examples/v2/observability-pipelines/UpdatePipeline.py b/examples/v2/observability-pipelines/UpdatePipeline.py index c8e05195fa..e1c8cf4faa 100644 --- a/examples/v2/observability-pipelines/UpdatePipeline.py +++ b/examples/v2/observability-pipelines/UpdatePipeline.py @@ -45,7 +45,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/examples/v2/observability-pipelines/ValidatePipeline.py b/examples/v2/observability-pipelines/ValidatePipeline.py index a55d4a6368..e950443ff5 100644 --- a/examples/v2/observability-pipelines/ValidatePipeline.py +++ b/examples/v2/observability-pipelines/ValidatePipeline.py @@ -41,7 +41,7 @@ type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, ), ], - processors=[ + processor_groups=[ ObservabilityPipelineConfigProcessorGroup( enabled=True, id="my-processor-group", diff --git a/src/datadog_api_client/configuration.py b/src/datadog_api_client/configuration.py index 5f29613298..cb031ef99a 100644 --- a/src/datadog_api_client/configuration.py +++ b/src/datadog_api_client/configuration.py @@ -359,6 +359,12 @@ def __init__( "v2.update_monitor_user_template": False, "v2.validate_existing_monitor_user_template": False, "v2.validate_monitor_user_template": False, + "v2.create_pipeline": False, + "v2.delete_pipeline": False, + "v2.get_pipeline": False, + "v2.list_pipelines": False, + "v2.update_pipeline": False, + "v2.validate_pipeline": False, "v2.list_role_templates": False, "v2.create_connection": False, "v2.delete_connection": False, @@ -370,12 +376,6 @@ def __init__( "v2.query_event_filtered_users": False, "v2.query_users": False, "v2.update_connection": False, - "v2.create_pipeline": False, - "v2.delete_pipeline": False, - "v2.get_pipeline": False, - "v2.list_pipelines": False, - "v2.update_pipeline": False, - "v2.validate_pipeline": False, "v2.create_scorecard_outcomes_batch": False, "v2.create_scorecard_rule": False, "v2.delete_scorecard_rule": False, diff --git a/src/datadog_api_client/v2/api/observability_pipelines_api.py b/src/datadog_api_client/v2/api/observability_pipelines_api.py index 2c14f8497a..12332dfd33 100644 --- a/src/datadog_api_client/v2/api/observability_pipelines_api.py +++ b/src/datadog_api_client/v2/api/observability_pipelines_api.py @@ -31,7 +31,7 @@ def __init__(self, 
api_client=None): settings={ "response_type": (ObservabilityPipeline,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "create_pipeline", "http_method": "POST", "version": "v2", @@ -97,7 +97,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ListPipelinesResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "list_pipelines", "http_method": "GET", "version": "v2", @@ -150,7 +150,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ValidationResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/validate", "operation_id": "validate_pipeline", "http_method": "POST", "version": "v2", diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py new file mode 100644 index 0000000000..695994d404 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py @@ -0,0 +1,79 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + +class ObservabilityPipelineAddHostnameProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineAddHostnameProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineAddHostnameProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``add_hostname`` processor adds the hostname to log events. + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param type: The processor type. The value should always be ``add_hostname``. 
+ :type type: ObservabilityPipelineAddHostnameProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py new file mode 100644 index 0000000000..3f0c2dd4ff --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAddHostnameProcessorType(ModelSimple): + """ + The processor type. The value should always be `add_hostname`. + + :param value: If omitted defaults to "add_hostname". Must be one of ["add_hostname"]. + :type value: str + """ + + allowed_values = { + "add_hostname", + } + ADD_HOSTNAME: ClassVar["ObservabilityPipelineAddHostnameProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME = ObservabilityPipelineAddHostnameProcessorType( + "add_hostname" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py new file mode 100644 index 0000000000..4d163daf30 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py @@ -0,0 +1,56 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + +class ObservabilityPipelineCloudPremDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineCloudPremDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineCloudPremDestinationType, **kwargs): + """ + The ``cloud_prem`` destination sends logs to Datadog CloudPrem. + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``cloud_prem``. 
+ :type type: ObservabilityPipelineCloudPremDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py new file mode 100644 index 0000000000..88110c24ef --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineCloudPremDestinationType(ModelSimple): + """ + The destination type. The value should always be `cloud_prem`. + + :param value: If omitted defaults to "cloud_prem". Must be one of ["cloud_prem"]. + :type value: str + """ + + allowed_values = { + "cloud_prem", + } + CLOUD_PREM: ClassVar["ObservabilityPipelineCloudPremDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM = ObservabilityPipelineCloudPremDestinationType("cloud_prem") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config.py b/src/datadog_api_client/v2/model/observability_pipeline_config.py index 48cf842a5d..57f3afdeec 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config.py @@ -26,6 +26,9 @@ from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( ObservabilityPipelineDatadogLogsDestination, ) + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, + ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) @@ -76,6 +79,9 @@ from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( ObservabilityPipelineGooglePubSubDestination, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, @@ -110,6 +116,9 @@ ) from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) class ObservabilityPipelineConfig(ModelNormal): @@ -127,13 +136,13 @@ def openapi_types(_): return { "destinations": ([ObservabilityPipelineConfigDestinationItem],), - "processors": ([ObservabilityPipelineConfigProcessorGroup],), + "processor_groups": ([ObservabilityPipelineConfigProcessorGroup],), "sources": ([ObservabilityPipelineConfigSourceItem],), } attribute_map = { "destinations": "destinations", - "processors": 
"processors", + "processor_groups": "processor_groups", "sources": "sources", } @@ -143,6 +152,7 @@ def __init__( Union[ ObservabilityPipelineConfigDestinationItem, ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination, @@ -161,6 +171,7 @@ def __init__( ObservabilityPipelineAmazonSecurityLakeDestination, ObservabilityPipelineCrowdStrikeNextGenSiemDestination, ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, ] ], sources: List[ @@ -182,9 +193,10 @@ def __init__( ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineOpentelemetrySource, ] ], - processors: Union[List[ObservabilityPipelineConfigProcessorGroup], UnsetType] = unset, + processor_groups: Union[List[ObservabilityPipelineConfigProcessorGroup], UnsetType] = unset, **kwargs, ): """ @@ -193,14 +205,14 @@ def __init__( :param destinations: A list of destination components where processed logs are sent. :type destinations: [ObservabilityPipelineConfigDestinationItem] - :param processors: A list of processor groups that transform or enrich log data. - :type processors: [ObservabilityPipelineConfigProcessorGroup], optional + :param processor_groups: A list of processor groups that transform or enrich log data. + :type processor_groups: [ObservabilityPipelineConfigProcessorGroup], optional :param sources: A list of configured data sources for the pipeline. :type sources: [ObservabilityPipelineConfigSourceItem] """ - if processors is not unset: - kwargs["processors"] = processors + if processor_groups is not unset: + kwargs["processor_groups"] = processor_groups super().__init__(kwargs) self_.destinations = destinations diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index df018bec4e..0e0376b0a8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -80,6 +80,9 @@ def __init__(self, **kwargs): :param bulk_index: The index to write logs to in Elasticsearch. :type bulk_index: str, optional + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + :param keepalive: Optional socket keepalive duration in milliseconds. :type keepalive: int, optional @@ -124,6 +127,30 @@ def __init__(self, **kwargs): :param topic: The Pub/Sub topic name to publish logs to. :type topic: str + + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional + + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional + + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional + + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. 
+ :type rate_limit_duration_secs: int, optional + + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. + :type rate_limit_num: int, optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional """ super().__init__(kwargs) @@ -139,6 +166,9 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( ObservabilityPipelineDatadogLogsDestination, ) + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, + ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) @@ -189,10 +219,14 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( ObservabilityPipelineGooglePubSubDestination, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) return { "oneOf": [ ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination, @@ -211,5 +245,6 @@ def _composed_schemas(_): ObservabilityPipelineAmazonSecurityLakeDestination, ObservabilityPipelineCrowdStrikeNextGenSiemDestination, ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py index 6579870f58..43a1388133 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py @@ -17,48 +17,57 @@ from datadog_api_client.v2.model.observability_pipeline_config_processor_item import ( ObservabilityPipelineConfigProcessorItem, ) - from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor - from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( - ObservabilityPipelineParseJSONProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( ObservabilityPipelineAddFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( - ObservabilityPipelineRemoveFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( - ObservabilityPipelineRenameFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + 
ObservabilityPipelineDatadogTagsProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( ObservabilityPipelineGenerateMetricsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( - ObservabilityPipelineSensitiveDataScannerProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( + ObservabilityPipelineParseJSONProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, + from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor + from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( + ObservabilityPipelineRemoveFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, + from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( + ObservabilityPipelineRenameFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, - ) class ObservabilityPipelineConfigProcessorGroup(ModelNormal): @@ -95,24 +104,27 @@ def __init__( processors: List[ Union[ ObservabilityPipelineConfigProcessorItem, + ObservabilityPipelineAddEnvVarsProcessor, + 
ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, ] ], display_name: Union[str, UnsetType] = unset, diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 8becc43c0d..fc33a559f2 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -21,19 +21,82 @@ def __init__(self, **kwargs): :param enabled: Whether this processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + :param id: The unique identifier for this component. Used to reference this processor in the pipeline. :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs this processor targets. :type include: str - :param type: The processor type. The value should always be `filter`. - :type type: ObservabilityPipelineFilterProcessorType + :param type: The processor type. The value should always be `add_env_vars`. + :type type: ObservabilityPipelineAddEnvVarsProcessorType + + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + + :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. + :type fields: [ObservabilityPipelineFieldValue] + + :param remaps: Array of VRL remap rules. + :type remaps: [ObservabilityPipelineCustomProcessorRemap] + + :param action: The action to take on tags with matching keys. + :type action: ObservabilityPipelineDatadogTagsProcessorAction + + :param keys: A list of tag keys. + :type keys: [str] + + :param mode: The processing mode. + :type mode: ObservabilityPipelineDatadogTagsProcessorMode + + :param file: Defines a static enrichment table loaded from a CSV file. 
+ :type file: ObservabilityPipelineEnrichmentTableFile, optional + + :param geoip: Uses a GeoIP database to enrich logs based on an IP field. + :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional + + :param reference_table: Uses a Datadog reference table to enrich logs. + :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional + + :param target: Path where enrichment results should be stored in the log. + :type target: str + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric], optional + + :param mappings: A list of mapping rules to convert events to the OCSF format. + :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. + :type disable_library_rules: bool, optional + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] :param field: The name of the log field that contains a JSON string. :type field: str - :param drop_events: If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param drop_events: If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. :type drop_events: bool, optional :param ignore_when_missing_partitions: If `true`, the processor skips quota checks when partition fields are missing from the logs. @@ -45,7 +108,7 @@ def __init__(self, **kwargs): :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. @@ -57,41 +120,11 @@ def __init__(self, **kwargs): :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional - :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. 
- :type fields: [ObservabilityPipelineFieldValue] - - :param metrics: Configuration for generating individual metrics. - :type metrics: [ObservabilityPipelineGeneratedMetric], optional - - :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). - :type rate: int, optional - - :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. - :type disable_library_rules: bool, optional - - :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. - :type rules: [ObservabilityPipelineParseGrokProcessorRule] - - :param mappings: A list of mapping rules to convert events to the OCSF format. - :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] - - :param variables: A list of environment variable mappings to apply to log fields. - :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] - - :param mode: The deduplication mode to apply to the fields. - :type mode: ObservabilityPipelineDedupeProcessorMode - - :param file: Defines a static enrichment table loaded from a CSV file. - :type file: ObservabilityPipelineEnrichmentTableFile, optional - - :param geoip: Uses a GeoIP database to enrich logs based on an IP field. - :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional - - :param target: Path where enrichment results should be stored in the log. - :type target: str + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional :param group_by: A list of fields used to group log events for merging. :type group_by: [str] @@ -99,20 +132,17 @@ :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + :param percentage: The percentage of logs to sample. + :type percentage: float + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + :param threshold: The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped. :type threshold: int :param window: The time window in seconds over which the threshold applies. :type window: float - - :param remaps: Array of VRL remap rules. - :type remaps: [ObservabilityPipelineCustomProcessorRemap] - - :param action: The action to take on tags with matching keys. - :type action: ObservabilityPipelineDatadogTagsProcessorAction - - :param keys: A list of tag keys. 
- :type keys: [str] """ super().__init__(kwargs) @@ -125,17 +155,50 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( + ObservabilityPipelineAddFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( + ObservabilityPipelineCustomProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( + ObservabilityPipelineDedupeProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_filter_processor import ( ObservabilityPipelineFilterProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ( ObservabilityPipelineQuotaProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( - ObservabilityPipelineAddFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( + ObservabilityPipelineReduceProcessor, ) from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( ObservabilityPipelineRemoveFieldsProcessor, @@ -143,62 +206,41 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( - ObservabilityPipelineGenerateMetricsProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sample_processor import ( ObservabilityPipelineSampleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( - ObservabilityPipelineParseGrokProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( ObservabilityPipelineSensitiveDataScannerProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, - ) - from 
datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( - ObservabilityPipelineDedupeProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( - ObservabilityPipelineReduceProcessor, + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( - ObservabilityPipelineCustomProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, - ) return { "oneOf": [ + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py index 04e30172c1..03f3f2840f 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py @@ -22,10 +22,10 @@ def __init__(self, **kwargs): :type id: str :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. - :type sasl: ObservabilityPipelineKafkaSourceSasl, optional + :type sasl: ObservabilityPipelineKafkaSasl, optional :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional @@ -66,6 +66,12 @@ def __init__(self, **kwargs): :param framing: Framing method configuration for the socket source. 
:type framing: ObservabilityPipelineSocketSourceFraming + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type http_address_key: str, optional """ super().__init__(kwargs) @@ -118,6 +124,9 @@ def _composed_schemas(_): ObservabilityPipelineLogstashSource, ) from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) return { "oneOf": [ @@ -137,5 +146,6 @@ def _composed_schemas(_): ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineOpentelemetrySource, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py index 8ce413b7ef..59babc31b8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py @@ -17,6 +17,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -28,6 +31,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -35,6 +41,7 @@ def openapi_types(_): return { "api_version": (ObservabilityPipelineElasticsearchDestinationApiVersion,), "bulk_index": (str,), + "data_stream": (ObservabilityPipelineElasticsearchDestinationDataStream,), "id": (str,), "inputs": ([str],), "type": (ObservabilityPipelineElasticsearchDestinationType,), @@ -43,6 +50,7 @@ def openapi_types(_): attribute_map = { "api_version": "api_version", "bulk_index": "bulk_index", + "data_stream": "data_stream", "id": "id", "inputs": "inputs", "type": "type", @@ -55,6 +63,7 @@ def __init__( type: ObservabilityPipelineElasticsearchDestinationType, api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, bulk_index: Union[str, UnsetType] = unset, + data_stream: Union[ObservabilityPipelineElasticsearchDestinationDataStream, UnsetType] = unset, **kwargs, ): """ @@ -66,6 +75,9 @@ def __init__( :param bulk_index: The index to write logs to in Elasticsearch. 
:type bulk_index: str, optional + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + :param id: The unique identifier for this component. :type id: str @@ -79,6 +91,8 @@ def __init__( kwargs["api_version"] = api_version if bulk_index is not unset: kwargs["bulk_index"] = bulk_index + if data_stream is not unset: + kwargs["data_stream"] = data_stream super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py new file mode 100644 index 0000000000..8046b936b9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py @@ -0,0 +1,56 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineElasticsearchDestinationDataStream(ModelNormal): + @cached_property + def openapi_types(_): + return { + "dataset": (str,), + "dtype": (str,), + "namespace": (str,), + } + + attribute_map = { + "dataset": "dataset", + "dtype": "dtype", + "namespace": "namespace", + } + + def __init__( + self_, + dataset: Union[str, UnsetType] = unset, + dtype: Union[str, UnsetType] = unset, + namespace: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + + :param dataset: The data stream dataset for your logs. This groups logs by their source or application. + :type dataset: str, optional + + :param dtype: The data stream type for your logs. This determines how logs are categorized within the data stream. + :type dtype: str, optional + + :param namespace: The data stream namespace for your logs. This separates logs into different environments or domains. 
+ :type namespace: str, optional + """ + if dataset is not unset: + kwargs["dataset"] = dataset + if dtype is not unset: + kwargs["dtype"] = dtype + if namespace is not unset: + kwargs["namespace"] = namespace + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py index 3483c95ae4..8277562633 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py @@ -20,6 +20,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -34,6 +37,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -45,6 +51,7 @@ def openapi_types(_): "geoip": (ObservabilityPipelineEnrichmentTableGeoIp,), "id": (str,), "include": (str,), + "reference_table": (ObservabilityPipelineEnrichmentTableReferenceTable,), "target": (str,), "type": (ObservabilityPipelineEnrichmentTableProcessorType,), } @@ -56,6 +63,7 @@ def openapi_types(_): "geoip": "geoip", "id": "id", "include": "include", + "reference_table": "reference_table", "target": "target", "type": "type", } @@ -70,10 +78,11 @@ def __init__( display_name: Union[str, UnsetType] = unset, file: Union[ObservabilityPipelineEnrichmentTableFile, UnsetType] = unset, geoip: Union[ObservabilityPipelineEnrichmentTableGeoIp, UnsetType] = unset, + reference_table: Union[ObservabilityPipelineEnrichmentTableReferenceTable, UnsetType] = unset, **kwargs, ): """ - The ``enrichment_table`` processor enriches logs using a static CSV file or GeoIP database. + The ``enrichment_table`` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of ``file`` , ``geoip`` , or ``reference_table`` must be configured. :param display_name: The display name for a component. :type display_name: str, optional @@ -93,6 +102,9 @@ def __init__( :param include: A Datadog search query used to determine which logs this processor targets. :type include: str + :param reference_table: Uses a Datadog reference table to enrich logs. + :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional + :param target: Path where enrichment results should be stored in the log. 
:type target: str @@ -105,6 +117,8 @@ def __init__( kwargs["file"] = file if geoip is not unset: kwargs["geoip"] = geoip + if reference_table is not unset: + kwargs["reference_table"] = reference_table super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py new file mode 100644 index 0000000000..9788ea4973 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py @@ -0,0 +1,49 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineEnrichmentTableReferenceTable(ModelNormal): + @cached_property + def openapi_types(_): + return { + "columns": ([str],), + "key_field": (str,), + "table_id": (str,), + } + + attribute_map = { + "columns": "columns", + "key_field": "key_field", + "table_id": "table_id", + } + + def __init__(self_, key_field: str, table_id: str, columns: Union[List[str], UnsetType] = unset, **kwargs): + """ + Uses a Datadog reference table to enrich logs. + + :param columns: List of column names to include from the reference table. If not provided, all columns are included. + :type columns: [str], optional + + :param key_field: Path to the field in the log event to match against the reference table. + :type key_field: str + + :param table_id: The unique identifier of the reference table. + :type table_id: str + """ + if columns is not unset: + kwargs["columns"] = columns + super().__init__(kwargs) + + self_.key_field = key_field + self_.table_id = table_id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py new file mode 100644 index 0000000000..9817607ee4 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py @@ -0,0 +1,196 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
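# Usage sketch (editorial, not part of the generated patch): constructing the
# `kafka` destination defined in the new module below. The component id, the
# upstream input, and the topic name are hypothetical placeholders; the enum
# members used are the ones declared in this patch.
from datadog_api_client.v2.model.observability_pipeline_kafka_destination import (
    ObservabilityPipelineKafkaDestination,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import (
    ObservabilityPipelineKafkaDestinationEncoding,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import (
    ObservabilityPipelineKafkaDestinationType,
)

kafka_destination = ObservabilityPipelineKafkaDestination(
    id="kafka-destination",  # hypothetical component id
    inputs=["filter-processor"],  # hypothetical upstream component
    topic="observability-logs",  # hypothetical Kafka topic
    encoding=ObservabilityPipelineKafkaDestinationEncoding.JSON,
    type=ObservabilityPipelineKafkaDestinationType.KAFKA,
)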
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + +class ObservabilityPipelineKafkaDestination(ModelNormal): + validations = { + "message_timeout_ms": { + "inclusive_minimum": 1, + }, + "rate_limit_duration_secs": { + "inclusive_minimum": 1, + }, + "rate_limit_num": { + "inclusive_minimum": 1, + }, + "socket_timeout_ms": { + "inclusive_maximum": 300000, + "inclusive_minimum": 10, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + return { + "compression": (ObservabilityPipelineKafkaDestinationCompression,), + "encoding": (ObservabilityPipelineKafkaDestinationEncoding,), + "headers_key": (str,), + "id": (str,), + "inputs": ([str],), + "key_field": (str,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "message_timeout_ms": (int,), + "rate_limit_duration_secs": (int,), + "rate_limit_num": (int,), + "sasl": (ObservabilityPipelineKafkaSasl,), + "socket_timeout_ms": (int,), + "tls": (ObservabilityPipelineTls,), + "topic": (str,), + "type": (ObservabilityPipelineKafkaDestinationType,), + } + + attribute_map = { + "compression": "compression", + "encoding": "encoding", + "headers_key": "headers_key", + "id": "id", + "inputs": "inputs", + "key_field": "key_field", + "librdkafka_options": "librdkafka_options", + "message_timeout_ms": "message_timeout_ms", + "rate_limit_duration_secs": "rate_limit_duration_secs", + "rate_limit_num": "rate_limit_num", + "sasl": "sasl", + "socket_timeout_ms": "socket_timeout_ms", + "tls": "tls", + "topic": "topic", + "type": "type", + } + + def __init__( + self_, + encoding: ObservabilityPipelineKafkaDestinationEncoding, + id: str, + inputs: List[str], + topic: str, + type: ObservabilityPipelineKafkaDestinationType, + compression: Union[ObservabilityPipelineKafkaDestinationCompression, UnsetType] = unset, + headers_key: Union[str, 
UnsetType] = unset, + key_field: Union[str, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + message_timeout_ms: Union[int, UnsetType] = unset, + rate_limit_duration_secs: Union[int, UnsetType] = unset, + rate_limit_num: Union[int, UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, + socket_timeout_ms: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``kafka`` destination sends logs to Apache Kafka topics. + + :param compression: Compression codec for Kafka messages. + :type compression: ObservabilityPipelineKafkaDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineKafkaDestinationEncoding + + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional + + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional + + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. + :type rate_limit_duration_secs: int, optional + + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. + :type rate_limit_num: int, optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param topic: The Kafka topic name to publish logs to. + :type topic: str + + :param type: The destination type. The value should always be ``kafka``. 
+ :type type: ObservabilityPipelineKafkaDestinationType + """ + if compression is not unset: + kwargs["compression"] = compression + if headers_key is not unset: + kwargs["headers_key"] = headers_key + if key_field is not unset: + kwargs["key_field"] = key_field + if librdkafka_options is not unset: + kwargs["librdkafka_options"] = librdkafka_options + if message_timeout_ms is not unset: + kwargs["message_timeout_ms"] = message_timeout_ms + if rate_limit_duration_secs is not unset: + kwargs["rate_limit_duration_secs"] = rate_limit_duration_secs + if rate_limit_num is not unset: + kwargs["rate_limit_num"] = rate_limit_num + if sasl is not unset: + kwargs["sasl"] = sasl + if socket_timeout_ms is not unset: + kwargs["socket_timeout_ms"] = socket_timeout_ms + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.encoding = encoding + self_.id = id + self_.inputs = inputs + self_.topic = topic + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py new file mode 100644 index 0000000000..3cd49d2960 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py @@ -0,0 +1,47 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationCompression(ModelSimple): + """ + Compression codec for Kafka messages. + + :param value: Must be one of ["none", "gzip", "snappy", "lz4", "zstd"]. + :type value: str + """ + + allowed_values = { + "none", + "gzip", + "snappy", + "lz4", + "zstd", + } + NONE: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + GZIP: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + SNAPPY: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + LZ4: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + ZSTD: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationCompression.NONE = ObservabilityPipelineKafkaDestinationCompression("none") +ObservabilityPipelineKafkaDestinationCompression.GZIP = ObservabilityPipelineKafkaDestinationCompression("gzip") +ObservabilityPipelineKafkaDestinationCompression.SNAPPY = ObservabilityPipelineKafkaDestinationCompression("snappy") +ObservabilityPipelineKafkaDestinationCompression.LZ4 = ObservabilityPipelineKafkaDestinationCompression("lz4") +ObservabilityPipelineKafkaDestinationCompression.ZSTD = ObservabilityPipelineKafkaDestinationCompression("zstd") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py new file mode 100644 index 0000000000..99db79c36e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationEncoding(ModelSimple): + """ + Encoding format for log events. + + :param value: Must be one of ["json", "raw_message"]. + :type value: str + """ + + allowed_values = { + "json", + "raw_message", + } + JSON: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"] + RAW_MESSAGE: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationEncoding.JSON = ObservabilityPipelineKafkaDestinationEncoding("json") +ObservabilityPipelineKafkaDestinationEncoding.RAW_MESSAGE = ObservabilityPipelineKafkaDestinationEncoding("raw_message") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py new file mode 100644 index 0000000000..e2e290b169 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationType(ModelSimple): + """ + The destination type. The value should always be `kafka`. + + :param value: If omitted defaults to "kafka". Must be one of ["kafka"]. + :type value: str + """ + + allowed_values = { + "kafka", + } + KAFKA: ClassVar["ObservabilityPipelineKafkaDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationType.KAFKA = ObservabilityPipelineKafkaDestinationType("kafka") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py similarity index 84% rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py index 4099a196cd..a7e29aa09b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py @@ -10,7 +10,7 @@ ) -class ObservabilityPipelineKafkaSourceLibrdkafkaOption(ModelNormal): +class ObservabilityPipelineKafkaLibrdkafkaOption(ModelNormal): @cached_property def openapi_types(_): return { @@ -25,7 +25,7 @@ def openapi_types(_): def __init__(self_, name: str, value: str, **kwargs): """ - Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka sources, such as timeouts, buffer sizes, and security settings. + Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka sources and destinations, such as timeouts, buffer sizes, and security settings. :param name: The name of the ``librdkafka`` configuration option to set. 
:type name: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py similarity index 59% rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py index 88f6e0aaab..e6d42bc1c8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py @@ -14,34 +14,32 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) -class ObservabilityPipelineKafkaSourceSasl(ModelNormal): +class ObservabilityPipelineKafkaSasl(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) return { - "mechanism": (ObservabilityPipelinePipelineKafkaSourceSaslMechanism,), + "mechanism": (ObservabilityPipelineKafkaSaslMechanism,), } attribute_map = { "mechanism": "mechanism", } - def __init__( - self_, mechanism: Union[ObservabilityPipelinePipelineKafkaSourceSaslMechanism, UnsetType] = unset, **kwargs - ): + def __init__(self_, mechanism: Union[ObservabilityPipelineKafkaSaslMechanism, UnsetType] = unset, **kwargs): """ Specifies the SASL mechanism for authenticating with a Kafka cluster. :param mechanism: SASL mechanism used for Kafka authentication. - :type mechanism: ObservabilityPipelinePipelineKafkaSourceSaslMechanism, optional + :type mechanism: ObservabilityPipelineKafkaSaslMechanism, optional """ if mechanism is not unset: kwargs["mechanism"] = mechanism diff --git a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py similarity index 50% rename from src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py index db414b0a6f..2f261fd1aa 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py @@ -12,7 +12,7 @@ from typing import ClassVar -class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): +class ObservabilityPipelineKafkaSaslMechanism(ModelSimple): """ SASL mechanism used for Kafka authentication. 
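# Usage sketch (editorial, not part of the generated patch): the SASL models
# renamed above are now shared by the Kafka source and destination. A minimal
# configuration, assuming the PLAIN mechanism declared in this patch:
from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl
from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import (
    ObservabilityPipelineKafkaSaslMechanism,
)

sasl = ObservabilityPipelineKafkaSasl(mechanism=ObservabilityPipelineKafkaSaslMechanism.PLAIN)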
@@ -25,9 +25,9 @@ class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): "SCRAM-SHA-256", "SCRAM-SHA-512", } - PLAIN: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] + PLAIN: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] @cached_property def openapi_types(_): @@ -36,12 +36,6 @@ def openapi_types(_): } -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.PLAIN = ObservabilityPipelinePipelineKafkaSourceSaslMechanism( - "PLAIN" -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_256 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256") -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_512 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512") -) +ObservabilityPipelineKafkaSaslMechanism.PLAIN = ObservabilityPipelineKafkaSaslMechanism("PLAIN") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_512 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py index 3ba6cfe651..bb5ee4cb79 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py @@ -14,12 +14,10 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -29,12 +27,10 @@ class ObservabilityPipelineKafkaSource(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -43,8 +39,8 @@ def openapi_types(_): return { "group_id": (str,), "id": (str,), - "librdkafka_options": 
([ObservabilityPipelineKafkaSourceLibrdkafkaOption],), - "sasl": (ObservabilityPipelineKafkaSourceSasl,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "sasl": (ObservabilityPipelineKafkaSasl,), "tls": (ObservabilityPipelineTls,), "topics": ([str],), "type": (ObservabilityPipelineKafkaSourceType,), @@ -66,8 +62,8 @@ def __init__( id: str, topics: List[str], type: ObservabilityPipelineKafkaSourceType, - librdkafka_options: Union[List[ObservabilityPipelineKafkaSourceLibrdkafkaOption], UnsetType] = unset, - sasl: Union[ObservabilityPipelineKafkaSourceSasl, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): @@ -81,10 +77,10 @@ def __init__( :type id: str :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. - :type sasl: ObservabilityPipelineKafkaSourceSasl, optional + :type sasl: ObservabilityPipelineKafkaSasl, optional :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py new file mode 100644 index 0000000000..f938e3570f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py @@ -0,0 +1,83 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
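Before the body of the new OpenTelemetry source file continues below, a hedged sketch tying the renamed Kafka models together in the updated ``kafka`` source constructor; the component ID, consumer group, topics, and librdkafka option are placeholders, and the ``KAFKA`` enum varname is assumed from the pre-existing source type model::

    from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import (
        ObservabilityPipelineKafkaLibrdkafkaOption,
    )
    from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl
    from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import (
        ObservabilityPipelineKafkaSaslMechanism,
    )
    from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource
    from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import (
        ObservabilityPipelineKafkaSourceType,
    )

    source = ObservabilityPipelineKafkaSource(
        id="kafka-source",            # placeholder component ID
        group_id="consumer-group-0",  # placeholder consumer group
        topics=["app-logs"],          # placeholder topic list
        type=ObservabilityPipelineKafkaSourceType.KAFKA,
        sasl=ObservabilityPipelineKafkaSasl(mechanism=ObservabilityPipelineKafkaSaslMechanism.PLAIN),
        librdkafka_options=[
            # Key-value pairs passed through to the Kafka client; this option name is illustrative.
            ObservabilityPipelineKafkaLibrdkafkaOption(name="fetch.message.max.bytes", value="1048576"),
        ],
    )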
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + +class ObservabilityPipelineOpentelemetrySource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + return { + "grpc_address_key": (str,), + "http_address_key": (str,), + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineOpentelemetrySourceType,), + } + + attribute_map = { + "grpc_address_key": "grpc_address_key", + "http_address_key": "http_address_key", + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineOpentelemetrySourceType, + grpc_address_key: Union[str, UnsetType] = unset, + http_address_key: Union[str, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``opentelemetry`` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP. + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type http_address_key: str, optional + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``opentelemetry``. + :type type: ObservabilityPipelineOpentelemetrySourceType + """ + if grpc_address_key is not unset: + kwargs["grpc_address_key"] = grpc_address_key + if http_address_key is not unset: + kwargs["http_address_key"] = http_address_key + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py new file mode 100644 index 0000000000..ec476d0dba --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
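The constructor above composes as follows; a minimal sketch with placeholder environment-variable names, using the type enum defined in the file that follows::

    from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import (
        ObservabilityPipelineOpentelemetrySource,
    )
    from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import (
        ObservabilityPipelineOpentelemetrySourceType,
    )

    # grpc_address_key and http_address_key name environment variables (alphanumeric
    # characters and underscores only) that hold the OTLP listener addresses.
    source = ObservabilityPipelineOpentelemetrySource(
        id="opentelemetry-source",
        type=ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY,
        grpc_address_key="OTEL_GRPC_ADDRESS",
        http_address_key="OTEL_HTTP_ADDRESS",
    )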
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOpentelemetrySourceType(ModelSimple): + """ + The source type. The value should always be `opentelemetry`. + + :param value: If omitted defaults to "opentelemetry". Must be one of ["opentelemetry"]. + :type value: str + """ + + allowed_values = { + "opentelemetry", + } + OPENTELEMETRY: ClassVar["ObservabilityPipelineOpentelemetrySourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY = ObservabilityPipelineOpentelemetrySourceType( + "opentelemetry" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py new file mode 100644 index 0000000000..afb330390f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py @@ -0,0 +1,148 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + +class ObservabilityPipelineParseXMLProcessor(ModelNormal): + validations = { + "text_key": { + "min_length": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + return { + "always_use_text_key": (bool,), + "attr_prefix": (str,), + "display_name": (str,), + "enabled": (bool,), + "field": (str,), + "id": (str,), + "include": (str,), + "include_attr": (bool,), + "parse_bool": (bool,), + "parse_null": (bool,), + "parse_number": (bool,), + "text_key": (str,), + "type": (ObservabilityPipelineParseXMLProcessorType,), + } + + attribute_map = { + "always_use_text_key": "always_use_text_key", + "attr_prefix": "attr_prefix", + "display_name": "display_name", + "enabled": "enabled", + "field": "field", + "id": "id", + "include": "include", + "include_attr": "include_attr", + "parse_bool": "parse_bool", + "parse_null": "parse_null", + "parse_number": "parse_number", + "text_key": "text_key", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + field: str, + id: str, + include: str, + type: ObservabilityPipelineParseXMLProcessorType, + always_use_text_key: Union[bool, UnsetType] = unset, + attr_prefix: Union[str, UnsetType] = unset, + display_name: Union[str, UnsetType] = unset, + include_attr: Union[bool, UnsetType] = unset, + parse_bool: Union[bool, UnsetType] = unset, + parse_null: Union[bool, UnsetType] = unset, + parse_number: Union[bool, UnsetType] = unset, + text_key: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``parse_xml`` processor parses XML from a specified field and extracts it into the event. + + :param always_use_text_key: Whether to always use a text key for element content. 
+ :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param field: The name of the log field that contains an XML string. + :type field: str + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param type: The processor type. The value should always be ``parse_xml``. + :type type: ObservabilityPipelineParseXMLProcessorType + """ + if always_use_text_key is not unset: + kwargs["always_use_text_key"] = always_use_text_key + if attr_prefix is not unset: + kwargs["attr_prefix"] = attr_prefix + if display_name is not unset: + kwargs["display_name"] = display_name + if include_attr is not unset: + kwargs["include_attr"] = include_attr + if parse_bool is not unset: + kwargs["parse_bool"] = parse_bool + if parse_null is not unset: + kwargs["parse_null"] = parse_null + if parse_number is not unset: + kwargs["parse_number"] = parse_number + if text_key is not unset: + kwargs["text_key"] = text_key + super().__init__(kwargs) + + self_.enabled = enabled + self_.field = field + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py new file mode 100644 index 0000000000..5e8f0a8285 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineParseXMLProcessorType(ModelSimple): + """ + The processor type. The value should always be `parse_xml`. + + :param value: If omitted defaults to "parse_xml". Must be one of ["parse_xml"]. 
+ :type value: str + """ + + allowed_values = { + "parse_xml", + } + PARSE_XML: ClassVar["ObservabilityPipelineParseXMLProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineParseXMLProcessorType.PARSE_XML = ObservabilityPipelineParseXMLProcessorType("parse_xml") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py index a122a03915..af72c324cd 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py @@ -56,6 +56,7 @@ def openapi_types(_): "overflow_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "overrides": ([ObservabilityPipelineQuotaProcessorOverride],), "partition_fields": ([str],), + "too_many_buckets_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "type": (ObservabilityPipelineQuotaProcessorType,), } @@ -71,6 +72,7 @@ def openapi_types(_): "overflow_action": "overflow_action", "overrides": "overrides", "partition_fields": "partition_fields", + "too_many_buckets_action": "too_many_buckets_action", "type": "type", } @@ -88,6 +90,7 @@ def __init__( overflow_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, overrides: Union[List[ObservabilityPipelineQuotaProcessorOverride], UnsetType] = unset, partition_fields: Union[List[str], UnsetType] = unset, + too_many_buckets_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, **kwargs, ): """ @@ -96,7 +99,7 @@ def __init__( :param display_name: The display name for a component. :type display_name: str, optional - :param drop_events: If set to ``true`` , logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param drop_events: If set to ``true`` , logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note** : You can set either ``drop_events`` or ``overflow_action`` , but not both. :type drop_events: bool, optional :param enabled: Whether this processor is enabled. @@ -117,7 +120,7 @@ def __init__( :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: * ``drop`` : Drop the event. * ``no_action`` : Let the event pass through. @@ -130,6 +133,13 @@ def __init__( :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + + * ``drop`` : Drop the event. + * ``no_action`` : Let the event pass through. + * ``overflow_routing`` : Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param type: The processor type. The value should always be ``quota``. 
:type type: ObservabilityPipelineQuotaProcessorType """ @@ -145,6 +155,8 @@ def __init__( kwargs["overrides"] = overrides if partition_fields is not unset: kwargs["partition_fields"] = partition_fields + if too_many_buckets_action is not unset: + kwargs["too_many_buckets_action"] = too_many_buckets_action super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py index 1341d5654d..3bf3e7727d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py @@ -14,7 +14,7 @@ class ObservabilityPipelineQuotaProcessorOverflowAction(ModelSimple): """ - The action to take when the quota is exceeded. Options: + The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py index b301a81a73..96cf8f7a1b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py @@ -3,7 +3,7 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import Union, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, @@ -21,8 +21,8 @@ class ObservabilityPipelineSampleProcessor(ModelNormal): validations = { - "rate": { - "inclusive_minimum": 1, + "group_by": { + "min_items": 1, }, } @@ -35,20 +35,20 @@ def openapi_types(_): return { "display_name": (str,), "enabled": (bool,), + "group_by": ([str],), "id": (str,), "include": (str,), "percentage": (float,), - "rate": (int,), "type": (ObservabilityPipelineSampleProcessorType,), } attribute_map = { "display_name": "display_name", "enabled": "enabled", + "group_by": "group_by", "id": "id", "include": "include", "percentage": "percentage", - "rate": "rate", "type": "type", } @@ -57,10 +57,10 @@ def __init__( enabled: bool, id: str, include: str, + percentage: float, type: ObservabilityPipelineSampleProcessorType, display_name: Union[str, UnsetType] = unset, - percentage: Union[float, UnsetType] = unset, - rate: Union[int, UnsetType] = unset, + group_by: Union[List[str], UnsetType] = unset, **kwargs, ): """ @@ -72,6 +72,9 @@ def __init__( :param enabled: Whether this processor is enabled. :type enabled: bool + :param group_by: Optional list of fields to group events by. Each group is sampled independently. + :type group_by: [str], optional + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str @@ -79,23 +82,19 @@ def __init__( :type include: str :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). - :type rate: int, optional + :type percentage: float :param type: The processor type. The value should always be ``sample``. 
:type type: ObservabilityPipelineSampleProcessorType """ if display_name is not unset: kwargs["display_name"] = display_name - if percentage is not unset: - kwargs["percentage"] = percentage - if rate is not unset: - kwargs["rate"] = rate + if group_by is not unset: + kwargs["group_by"] = group_by super().__init__(kwargs) self_.enabled = enabled self_.id = id self_.include = include + self_.percentage = percentage self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py index da99100de6..6b7a504c70 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py @@ -3,10 +3,13 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations +from typing import Union from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) @@ -14,20 +17,27 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions(Mod @cached_property def openapi_types(_): return { + "description": (str,), "rule": (str,), } attribute_map = { + "description": "description", "rule": "rule", } - def __init__(self_, rule: str, **kwargs): + def __init__(self_, rule: str, description: Union[str, UnsetType] = unset, **kwargs): """ Options for defining a custom regex pattern. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param rule: A regular expression used to detect sensitive values. Must be a valid regex. :type rule: str """ + if description is not unset: + kwargs["description"] = description super().__init__(kwargs) self_.rule = rule diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py index 1389dec5a1..91ac818cdf 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py @@ -17,25 +17,38 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions(Mo @cached_property def openapi_types(_): return { + "description": (str,), "id": (str,), "use_recommended_keywords": (bool,), } attribute_map = { + "description": "description", "id": "id", "use_recommended_keywords": "use_recommended_keywords", } - def __init__(self_, id: str, use_recommended_keywords: Union[bool, UnsetType] = unset, **kwargs): + def __init__( + self_, + id: str, + description: Union[str, UnsetType] = unset, + use_recommended_keywords: Union[bool, UnsetType] = unset, + **kwargs, + ): """ Options for selecting a predefined library pattern and enabling keyword support. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param id: Identifier for a predefined pattern from the sensitive data scanner pattern library. :type id: str :param use_recommended_keywords: Whether to augment the pattern with recommended keywords (optional). 
:type use_recommended_keywords: bool, optional """ + if description is not unset: + kwargs["description"] = description if use_recommended_keywords is not unset: kwargs["use_recommended_keywords"] = use_recommended_keywords super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py new file mode 100644 index 0000000000..844e8b4ab2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py @@ -0,0 +1,99 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + +class ObservabilityPipelineSplitArrayProcessor(ModelNormal): + validations = { + "arrays": { + "max_items": 15, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + return { + "arrays": ([ObservabilityPipelineSplitArrayProcessorArrayConfig],), + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineSplitArrayProcessorType,), + } + + attribute_map = { + "arrays": "arrays", + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + arrays: List[ObservabilityPipelineSplitArrayProcessorArrayConfig], + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineSplitArrayProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``split_array`` processor splits array fields into separate events based on configured rules. + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be ``*``. + :type include: str + + :param type: The processor type. The value should always be ``split_array``. 
+ :type type: ObservabilityPipelineSplitArrayProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.arrays = arrays + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py new file mode 100644 index 0000000000..ac7133ee2c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSplitArrayProcessorArrayConfig(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "include": (str,), + } + + attribute_map = { + "field": "field", + "include": "include", + } + + def __init__(self_, field: str, include: str, **kwargs): + """ + Configuration for a single array split operation. + + :param field: The path to the array field to split. + :type field: str + + :param include: A Datadog search query used to determine which logs this array split operation targets. + :type include: str + """ + super().__init__(kwargs) + + self_.field = field + self_.include = include diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py new file mode 100644 index 0000000000..c6b6a7e9a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplitArrayProcessorType(ModelSimple): + """ + The processor type. The value should always be `split_array`. + + :param value: If omitted defaults to "split_array". Must be one of ["split_array"]. 
+ :type value: str + """ + + allowed_values = { + "split_array", + } + SPLIT_ARRAY: ClassVar["ObservabilityPipelineSplitArrayProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY = ObservabilityPipelineSplitArrayProcessorType("split_array") diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 39ee6ab446..c797118593 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -2886,6 +2886,12 @@ from datadog_api_client.v2.model.observability_pipeline_add_fields_processor_type import ( ObservabilityPipelineAddFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( ObservabilityPipelineAmazonDataFirehoseSource, ) @@ -2924,6 +2930,12 @@ ObservabilityPipelineAmazonSecurityLakeDestinationType, ) from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, +) +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, @@ -2997,6 +3009,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) +from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, +) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -3030,6 +3045,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, +) from datadog_api_client.v2.model.observability_pipeline_field_value import ObservabilityPipelineFieldValue from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( @@ -3116,11 +3134,24 @@ from datadog_api_client.v2.model.observability_pipeline_http_server_source_type import ( ObservabilityPipelineHttpServerSourceType, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource -from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, +from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ObservabilityPipelineKafkaDestination +from 
datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ObservabilityPipelineKafkaSourceSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ObservabilityPipelineKafkaSourceType from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( @@ -3158,6 +3189,12 @@ from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, +) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, +) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) @@ -3179,8 +3216,11 @@ from datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type import ( ObservabilityPipelineParseJSONProcessorType, ) -from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import ( @@ -3402,6 +3442,15 @@ from datadog_api_client.v2.model.observability_pipeline_socket_source_type import ObservabilityPipelineSocketSourceType from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData +from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, +) from 
datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( ObservabilityPipelineSplunkHecDestination, ) @@ -7366,6 +7415,8 @@ "ObservabilityPipelineAddEnvVarsProcessorVariable", "ObservabilityPipelineAddFieldsProcessor", "ObservabilityPipelineAddFieldsProcessorType", + "ObservabilityPipelineAddHostnameProcessor", + "ObservabilityPipelineAddHostnameProcessorType", "ObservabilityPipelineAmazonDataFirehoseSource", "ObservabilityPipelineAmazonDataFirehoseSourceType", "ObservabilityPipelineAmazonOpenSearchDestination", @@ -7380,6 +7431,8 @@ "ObservabilityPipelineAmazonSecurityLakeDestination", "ObservabilityPipelineAmazonSecurityLakeDestinationType", "ObservabilityPipelineAwsAuth", + "ObservabilityPipelineCloudPremDestination", + "ObservabilityPipelineCloudPremDestinationType", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", "ObservabilityPipelineConfigProcessorGroup", @@ -7409,6 +7462,7 @@ "ObservabilityPipelineDedupeProcessorType", "ObservabilityPipelineElasticsearchDestination", "ObservabilityPipelineElasticsearchDestinationApiVersion", + "ObservabilityPipelineElasticsearchDestinationDataStream", "ObservabilityPipelineElasticsearchDestinationType", "ObservabilityPipelineEnrichmentTableFile", "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -7420,6 +7474,7 @@ "ObservabilityPipelineEnrichmentTableGeoIp", "ObservabilityPipelineEnrichmentTableProcessor", "ObservabilityPipelineEnrichmentTableProcessorType", + "ObservabilityPipelineEnrichmentTableReferenceTable", "ObservabilityPipelineFieldValue", "ObservabilityPipelineFilterProcessor", "ObservabilityPipelineFilterProcessorType", @@ -7454,9 +7509,14 @@ "ObservabilityPipelineHttpServerSource", "ObservabilityPipelineHttpServerSourceAuthStrategy", "ObservabilityPipelineHttpServerSourceType", + "ObservabilityPipelineKafkaDestination", + "ObservabilityPipelineKafkaDestinationCompression", + "ObservabilityPipelineKafkaDestinationEncoding", + "ObservabilityPipelineKafkaDestinationType", + "ObservabilityPipelineKafkaLibrdkafkaOption", + "ObservabilityPipelineKafkaSasl", + "ObservabilityPipelineKafkaSaslMechanism", "ObservabilityPipelineKafkaSource", - "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - "ObservabilityPipelineKafkaSourceSasl", "ObservabilityPipelineKafkaSourceType", "ObservabilityPipelineLogstashSource", "ObservabilityPipelineLogstashSourceType", @@ -7472,6 +7532,8 @@ "ObservabilityPipelineOcsfMappingLibrary", "ObservabilityPipelineOpenSearchDestination", "ObservabilityPipelineOpenSearchDestinationType", + "ObservabilityPipelineOpentelemetrySource", + "ObservabilityPipelineOpentelemetrySourceType", "ObservabilityPipelineParseGrokProcessor", "ObservabilityPipelineParseGrokProcessorRule", "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -7479,7 +7541,8 @@ "ObservabilityPipelineParseGrokProcessorType", "ObservabilityPipelineParseJSONProcessor", "ObservabilityPipelineParseJSONProcessorType", - "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "ObservabilityPipelineParseXMLProcessor", + "ObservabilityPipelineParseXMLProcessorType", "ObservabilityPipelineQuotaProcessor", "ObservabilityPipelineQuotaProcessorLimit", "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -7560,6 +7623,9 @@ "ObservabilityPipelineSocketSourceType", "ObservabilityPipelineSpec", "ObservabilityPipelineSpecData", + "ObservabilityPipelineSplitArrayProcessor", + "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "ObservabilityPipelineSplitArrayProcessorType", 
"ObservabilityPipelineSplunkHecDestination", "ObservabilityPipelineSplunkHecDestinationEncoding", "ObservabilityPipelineSplunkHecDestinationType", diff --git a/tests/v2/features/given.json b/tests/v2/features/given.json index aa08181879..3a01a060c5 100644 --- a/tests/v2/features/given.json +++ b/tests/v2/features/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processor_groups\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/tests/v2/features/observability_pipelines.feature b/tests/v2/features/observability_pipelines.feature index c43fa8b3b7..afe602c3ba 100644 --- a/tests/v2/features/observability_pipelines.feature +++ b/tests/v2/features/observability_pipelines.feature @@ -12,7 +12,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Bad Request" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": 
{"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -28,7 +28,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "OK" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 201 OK And the response "data" has field "id" @@ -106,7 +106,7 @@ Feature: Observability Pipelines And new 
"UpdatePipeline" request And there is a valid "pipeline" in the system And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -124,7 +124,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], 
"processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 404 Not Found @@ -134,7 +134,7 @@ Feature: Observability Pipelines And there is a valid "pipeline" in the system And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "data" has field "id" @@ -149,7 +149,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "Bad Request" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], 
"sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request And the response "errors[0].title" is equal to "Field 'include' is required" @@ -161,7 +161,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "OK" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "errors" has length 0 diff --git a/tests/v2/features/undo.json b/tests/v2/features/undo.json index eb43f26dbe..25c59473da 100644 --- a/tests/v2/features/undo.json +++ b/tests/v2/features/undo.json @@ -2838,6 +2838,31 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3443,31 +3468,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": {