From f84952b09d4d4595119a2b8b38e4159504a62e9e Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Tue, 6 Jan 2026 14:46:46 +0000 Subject: [PATCH] Regenerate client from commit d3adaf1 of spec repo --- .generator/schemas/v2/openapi.yaml | 533 ++++++++++++------ docs/datadog_api_client.v2.model.rst | 110 +++- .../v2/api/dora_metrics_api.py | 30 +- .../v2/model/azure_storage_destination.py | 25 + .../model/dora_deployment_fetch_response.py | 42 ++ .../v2/model/dora_deployment_object.py | 64 +++ .../dora_deployment_object_attributes.py | 101 ++++ .../model/dora_deployments_list_response.py | 42 ++ src/datadog_api_client/v2/model/dora_event.py | 56 -- ...onse.py => dora_failure_fetch_response.py} | 16 +- ...onse.py => dora_failures_list_response.py} | 16 +- .../v2/model/dora_incident_object.py | 64 +++ .../model/dora_incident_object_attributes.py | 119 ++++ .../model/microsoft_sentinel_destination.py | 29 +- ...pipeline_amazon_open_search_destination.py | 25 + ...vability_pipeline_amazon_s3_destination.py | 25 + ...peline_amazon_security_lake_destination.py | 25 + .../observability_pipeline_buffer_options.py | 57 ++ ...ility_pipeline_buffer_options_disk_type.py | 35 ++ ...ity_pipeline_buffer_options_memory_type.py | 35 ++ ...ility_pipeline_buffer_options_when_full.py | 38 ++ ...bility_pipeline_config_destination_item.py | 3 + ..._crowd_strike_next_gen_siem_destination.py | 25 + ...ility_pipeline_datadog_logs_destination.py | 37 +- ...ervability_pipeline_disk_buffer_options.py | 72 +++ ...lity_pipeline_elasticsearch_destination.py | 25 + ...y_pipeline_google_chronicle_destination.py | 25 + ...peline_google_cloud_storage_destination.py | 25 + ...ity_pipeline_google_pub_sub_destination.py | 25 + ...vability_pipeline_memory_buffer_options.py | 58 ++ ...ity_pipeline_memory_buffer_size_options.py | 58 ++ ...vability_pipeline_new_relic_destination.py | 29 +- ...bility_pipeline_open_search_destination.py | 25 + ...ervability_pipeline_rsyslog_destination.py | 25 + ...ility_pipeline_sentinel_one_destination.py | 29 +- ...servability_pipeline_socket_destination.py | 25 + ...ability_pipeline_splunk_hec_destination.py | 25 + ...ability_pipeline_sumo_logic_destination.py | 25 + ...vability_pipeline_syslog_ng_destination.py | 25 + src/datadog_api_client/v2/models/__init__.py | 48 +- ...peline_returns_bad_request_response.frozen | 2 +- ..._a_new_pipeline_returns_ok_response.frozen | 2 +- ...te_a_new_pipeline_returns_ok_response.yaml | 4 +- ...pipeline_returns_not_found_response.frozen | 2 +- ...lete_a_pipeline_returns_ok_response.frozen | 2 +- ...delete_a_pipeline_returns_ok_response.yaml | 6 +- ...ecific_pipeline_returns_ok_response.frozen | 2 +- ...specific_pipeline_returns_ok_response.yaml | 8 +- ...elines_returns_bad_request_response.frozen | 2 +- ..._list_pipelines_returns_ok_response.frozen | 2 +- ...st_list_pipelines_returns_ok_response.yaml | 24 +- ...peline_returns_bad_request_response.frozen | 2 +- ...pipeline_returns_bad_request_response.yaml | 6 +- ...pipeline_returns_not_found_response.frozen | 2 +- ...date_a_pipeline_returns_ok_response.frozen | 2 +- ...update_a_pipeline_returns_ok_response.yaml | 8 +- ...peline_returns_bad_request_response.frozen | 2 +- ...bility_pipeline_returns_ok_response.frozen | 2 +- 58 files changed, 1861 insertions(+), 315 deletions(-) create mode 100644 src/datadog_api_client/v2/model/dora_deployment_fetch_response.py create mode 100644 src/datadog_api_client/v2/model/dora_deployment_object.py create mode 100644 
src/datadog_api_client/v2/model/dora_deployment_object_attributes.py create mode 100644 src/datadog_api_client/v2/model/dora_deployments_list_response.py delete mode 100644 src/datadog_api_client/v2/model/dora_event.py rename src/datadog_api_client/v2/model/{dora_fetch_response.py => dora_failure_fetch_response.py} (58%) rename src/datadog_api_client/v2/model/{dora_list_response.py => dora_failures_list_response.py} (57%) create mode 100644 src/datadog_api_client/v2/model/dora_incident_object.py create mode 100644 src/datadog_api_client/v2/model/dora_incident_object_attributes.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_buffer_options_when_full.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py create mode 100644 src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b7484..00712ceccf 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6684,6 +6684,8 @@ components: description: Optional prefix for blobs written to the container. example: logs/ type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' container_name: description: The name of the Azure Blob Storage container to store logs in. @@ -15878,6 +15880,78 @@ components: type: string nullable: true type: array + DORADeploymentFetchResponse: + description: Response for fetching a single deployment event. + properties: + data: + $ref: '#/components/schemas/DORADeploymentObject' + type: object + DORADeploymentObject: + description: A DORA deployment event. + example: + attributes: + custom_tags: + - language:java + - department:engineering + - region:us-east-1 + env: production + finished_at: 1693491984000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + service: shopist + started_at: 1693491974000000000 + team: backend + version: v1.12.07 + id: 4242fcdd31586083 + type: dora_deployment + properties: + attributes: + $ref: '#/components/schemas/DORADeploymentObjectAttributes' + id: + description: The ID of the deployment event. + type: string + type: + $ref: '#/components/schemas/DORADeploymentType' + type: object + DORADeploymentObjectAttributes: + description: The attributes of the deployment event. + properties: + custom_tags: + $ref: '#/components/schemas/DORACustomTags' + env: + description: Environment name to where the service was deployed. + example: production + type: string + finished_at: + description: Unix timestamp when the deployment finished. + example: 1693491984000000000 + format: int64 + type: integer + git: + $ref: '#/components/schemas/DORAGitInfo' + service: + description: Service name. + example: shopist + type: string + started_at: + description: Unix timestamp when the deployment started. + example: 1693491974000000000 + format: int64 + type: integer + team: + description: Name of the team owning the deployed service. 
+ example: backend + type: string + version: + description: Version to correlate with APM Deployment Tracking. + example: v1.12.07 + type: string + required: + - service + - started_at + - finished_at + type: object DORADeploymentRequest: description: Request to create a DORA deployment event. properties: @@ -15968,18 +16042,53 @@ components: type: string x-enum-varnames: - DORA_DEPLOYMENT - DORAEvent: - description: A DORA event. + DORADeploymentsListResponse: + description: Response for the list deployments endpoint. + example: + data: + - attributes: + custom_tags: + - language:java + - department:engineering + - region:us-east-1 + env: production + finished_at: 1693491984000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + service: shopist + started_at: 1693491974000000000 + team: backend + version: v1.12.07 + id: 4242fcdd31586083 + type: dora_deployment + - attributes: + custom_tags: + - language:go + - department:platform + env: production + finished_at: 1693492084000000000 + git: + commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 + repository_url: https://github.com/organization/api-service + service: api-service + started_at: 1693492074000000000 + team: backend + version: v2.1.0 + id: 4242fcdd31586084 + type: dora_deployment properties: - attributes: - description: The attributes of the event. - type: object - id: - description: The ID of the event. - type: string - type: - description: The type of the event. - type: string + data: + description: The list of DORA deployment events. + items: + $ref: '#/components/schemas/DORADeploymentObject' + type: array + type: object + DORAFailureFetchResponse: + description: Response for fetching a single failure event. + properties: + data: + $ref: '#/components/schemas/DORAIncidentObject' type: object DORAFailureRequest: description: Request to create a DORA failure event. @@ -16083,11 +16192,45 @@ components: type: string x-enum-varnames: - DORA_FAILURE - DORAFetchResponse: - description: Response for the DORA fetch endpoints. + DORAFailuresListResponse: + description: Response for the list failures endpoint. + example: + data: + - attributes: + custom_tags: + - incident_type:database + - department:engineering + env: production + finished_at: 1693492274000000000 + name: Database outage + services: + - shopist + severity: SEV-1 + started_at: 1693492174000000000 + team: backend + id: 4242fcdd31586085 + type: dora_incident + - attributes: + custom_tags: + - incident_type:service_down + - department:platform + env: production + finished_at: 1693492474000000000 + name: API service outage + services: + - api-service + - payment-service + severity: SEV-2 + started_at: 1693492374000000000 + team: backend + id: 4242fcdd31586086 + type: dora_incident properties: data: - $ref: '#/components/schemas/DORAEvent' + description: The list of DORA incident events. + items: + $ref: '#/components/schemas/DORAIncidentObject' + type: array type: object DORAGitInfo: description: Git info for DORA Metrics events. @@ -16100,6 +16243,82 @@ components: - repository_url - commit_sha type: object + DORAIncidentObject: + description: A DORA incident event. 
+ example: + attributes: + custom_tags: + - incident_type:database + - department:engineering + env: production + finished_at: 1693492274000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + name: Database outage + services: + - shopist + severity: SEV-1 + started_at: 1693492174000000000 + team: backend + id: 4242fcdd31586085 + type: dora_incident + properties: + attributes: + $ref: '#/components/schemas/DORAIncidentObjectAttributes' + id: + description: The ID of the incident event. + type: string + type: + $ref: '#/components/schemas/DORAFailureType' + type: object + DORAIncidentObjectAttributes: + description: The attributes of the incident event. + properties: + custom_tags: + $ref: '#/components/schemas/DORACustomTags' + env: + description: Environment name that was impacted by the incident. + example: production + type: string + finished_at: + description: Unix timestamp when the incident finished. + example: 1693491984000000000 + format: int64 + type: integer + git: + $ref: '#/components/schemas/DORAGitInfo' + name: + description: Incident name. + example: Database outage + type: string + services: + description: Service names impacted by the incident. + example: + - shopist + items: + type: string + type: array + severity: + description: Incident severity. + example: SEV-1 + type: string + started_at: + description: Unix timestamp when the incident started. + example: 1693491974000000000 + format: int64 + type: integer + team: + description: Name of the team owning the services impacted. + example: backend + type: string + version: + description: Version to correlate with APM Deployment Tracking. + example: v1.12.07 + type: string + required: + - started_at + type: object DORAListDeploymentsRequest: description: Request to get a list of deployments. example: @@ -16119,32 +16338,31 @@ components: type: object DORAListDeploymentsRequestAttributes: description: Attributes to get a list of deployments. - example: - from: '2025-01-01T00:00:00Z' - limit: 500 - query: service:(shopist OR api-service OR payment-service) env:(production - OR staging) team:(backend OR platform) - sort: -started_at - to: '2025-01-31T23:59:59Z' properties: from: description: Minimum timestamp for requested events. + example: '2025-01-01T00:00:00Z' format: date-time type: string limit: default: 10 description: Maximum number of events in the response. + example: 500 format: int32 maximum: 1000 type: integer query: description: Search query with event platform syntax. + example: service:(shopist OR api-service OR payment-service) env:(production + OR staging) team:(backend OR platform) type: string sort: description: Sort order (prefixed with `-` for descending). + example: -started_at type: string to: description: Maximum timestamp for requested events. + example: '2025-01-31T23:59:59Z' format: date-time type: string type: object @@ -16167,9 +16385,11 @@ components: - attributes type: object DORAListDeploymentsRequestDataType: + default: dora_deployments_list_request description: The definition of `DORAListDeploymentsRequestDataType` object. enum: - dora_deployments_list_request + example: dora_deployments_list_request type: string x-enum-varnames: - DORA_DEPLOYMENTS_LIST_REQUEST @@ -16192,32 +16412,31 @@ components: type: object DORAListFailuresRequestAttributes: description: Attributes to get a list of failures. 
- example: - from: '2025-01-01T00:00:00Z' - limit: 500 - query: severity:(SEV-1 OR SEV-2) env:(production OR staging) service:(shopist - OR api-service OR payment-service) team:(backend OR platform OR payments) - sort: -started_at - to: '2025-01-31T23:59:59Z' properties: from: description: Minimum timestamp for requested events. + example: '2025-01-01T00:00:00Z' format: date-time type: string limit: default: 10 description: Maximum number of events in the response. + example: 500 format: int32 maximum: 1000 type: integer query: description: Search query with event platform syntax. + example: severity:(SEV-1 OR SEV-2) env:(production OR staging) service:(shopist + OR api-service OR payment-service) team:(backend OR platform OR payments) type: string sort: description: Sort order (prefixed with `-` for descending). + example: -started_at type: string to: description: Maximum timestamp for requested events. + example: '2025-01-31T23:59:59Z' format: date-time type: string type: object @@ -16240,54 +16459,14 @@ components: - attributes type: object DORAListFailuresRequestDataType: + default: dora_failures_list_request description: The definition of `DORAListFailuresRequestDataType` object. enum: - dora_failures_list_request + example: dora_failures_list_request type: string x-enum-varnames: - DORA_FAILURES_LIST_REQUEST - DORAListResponse: - description: Response for the DORA list endpoints. - example: - data: - - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: production - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - service: shopist - started_at: 1693491974000000000 - team: backend - version: v1.12.07 - id: 4242fcdd31586083 - type: dora_deployment - - attributes: - custom_tags: - - language:go - - department:platform - env: production - finished_at: 1693492084000000000 - git: - commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 - repository_url: https://github.com/organization/api-service - service: api-service - started_at: 1693492074000000000 - team: backend - version: v2.1.0 - id: 4242fcdd31586084 - type: dora_deployment - properties: - data: - description: The list of DORA events. - items: - $ref: '#/components/schemas/DORAEvent' - type: array - type: object DashboardListAddItemsRequest: description: Request containing a list of dashboards to add. properties: @@ -33334,6 +33513,8 @@ components: description: The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' client_id: description: Azure AD client ID used for authentication. example: a1b2c3d4-5678-90ab-cdef-1234567890ab @@ -35211,6 +35392,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -35287,6 +35470,8 @@ components: description: S3 bucket name. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: amazon-s3-destination @@ -35398,6 +35583,8 @@ components: description: Name of the Amazon S3 bucket in Security Lake (3-63 characters). 
example: security-lake-bucket type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' custom_source_name: description: Custom source name for the logs in Security Lake. example: my-custom-source @@ -35455,6 +35642,39 @@ components: role session. type: string type: object + ObservabilityPipelineBufferOptions: + description: Configuration for buffer settings on destination components. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions' + ObservabilityPipelineBufferOptionsDiskType: + default: disk + description: The type of the buffer that will be configured, a disk buffer. + enum: + - disk + type: string + x-enum-varnames: + - DISK + ObservabilityPipelineBufferOptionsMemoryType: + default: memory + description: The type of the buffer that will be configured, a memory buffer. + enum: + - memory + type: string + x-enum-varnames: + - MEMORY + ObservabilityPipelineBufferOptionsWhenFull: + default: block + description: Behavior when the buffer is full (block and stop accepting new + events, or drop new events) + enum: + - block + - drop_newest + type: string + x-enum-varnames: + - BLOCK + - DROP_NEWEST ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35641,6 +35861,8 @@ components: description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' encoding: @@ -35849,6 +36071,8 @@ components: ObservabilityPipelineDatadogLogsDestination: description: The `datadog_logs` destination forwards logs to Datadog Log Management. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: datadog-logs-destination @@ -36020,12 +36244,27 @@ components: type: string x-enum-varnames: - DEDUPE + ObservabilityPipelineDiskBufferOptions: + description: Options for configuring a disk buffer. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType' + when_full: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsWhenFull' + type: object ObservabilityPipelineElasticsearchDestination: description: The `elasticsearch` destination writes logs to an Elasticsearch cluster. properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to in Elasticsearch. example: logs-index @@ -36496,6 +36735,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' customer_id: description: The Google Chronicle customer ID. example: abcdefg123456789 @@ -36559,6 +36800,8 @@ components: description: Name of the GCS bucket. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. 
example: gcs-destination @@ -36638,6 +36881,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestinationEncoding' id: @@ -36925,6 +37170,28 @@ components: type: string x-enum-varnames: - LOGSTASH + ObservabilityPipelineMemoryBufferOptions: + description: Options for configuring a memory buffer by byte size. + properties: + max_size: + description: Maximum size of the memory buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object + ObservabilityPipelineMemoryBufferSizeOptions: + description: Options for configuring a memory buffer by queue length. + properties: + max_events: + description: Maximum events for the memory buffer. + example: 500 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object ObservabilityPipelineMetadataEntry: description: A custom metadata entry. properties: @@ -36948,6 +37215,8 @@ components: ObservabilityPipelineNewRelicDestination: description: The `new_relic` destination sends logs to the New Relic platform. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: new-relic-destination @@ -37084,6 +37353,8 @@ components: ObservabilityPipelineOpenSearchDestination: description: The `opensearch` destination writes logs to an OpenSearch cluster. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -37635,6 +37906,8 @@ components: description: The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: rsyslog-destination @@ -38105,6 +38378,8 @@ components: ObservabilityPipelineSentinelOneDestination: description: The `sentinel_one` destination sends logs to SentinelOne. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: sentinelone-destination @@ -38154,6 +38429,8 @@ components: description: The `socket` destination sends logs over TCP or UDP to a remote server. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' framing: @@ -38453,6 +38730,8 @@ components: If `false`, Splunk assigns the time the event was received.' example: true type: boolean + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: @@ -38562,6 +38841,8 @@ components: ObservabilityPipelineSumoLogicDestination: description: The `sumo_logic` destination forwards logs to Sumo Logic.
properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' header_custom_fields: @@ -38665,6 +38946,8 @@ components: description: The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: syslog-ng-destination @@ -67820,7 +68103,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DORAListResponse' + $ref: '#/components/schemas/DORADeploymentsListResponse' description: OK '400': content: @@ -67858,26 +68141,8 @@ paths: '200': content: application/json: - example: - data: - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: staging - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - service: shopist - started_at: 1693491974000000000 - team: backend - version: v1.12.07 - id: 4242fcdd31586083 - type: dora_deployment - schema: - $ref: '#/components/schemas/DORAFetchResponse' + schema: + $ref: '#/components/schemas/DORADeploymentFetchResponse' description: OK '400': content: @@ -67994,66 +68259,8 @@ paths: '200': content: application/json: - example: - data: - - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: production - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - name: Web server is down; all requests are failing. - services: - - shopist - severity: SEV-1 - started_at: 1693491974000000000 - team: backend - id: 4242fcdd31586085 - type: dora_failure - - attributes: - custom_tags: - - language:go - - department:platform - env: production - finished_at: 1693492084000000000 - git: - commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 - repository_url: https://github.com/organization/api-service - name: Database connection timeout - services: - - api-service - - payment-service - severity: SEV-1 - started_at: 1693492074000000000 - team: platform - version: v2.1.0 - id: 4242fcdd31586086 - type: dora_failure - - attributes: - custom_tags: - - language:python - - department:payments - - region:eu-west-1 - env: staging - finished_at: 1693492204000000000 - git: - commit_sha: 99edc9350f2cc9b250b69abddab733dd55e1a601 - repository_url: https://github.com/organization/payment-service - name: Payment gateway API rate limit exceeded - services: - - payment-service - severity: SEV-2 - started_at: 1693492174000000000 - team: payments - version: v1.8.3 - id: 4242fcdd31586087 - type: dora_failure - schema: - $ref: '#/components/schemas/DORAListResponse' + schema: + $ref: '#/components/schemas/DORAFailuresListResponse' description: OK '400': content: @@ -68091,28 +68298,8 @@ paths: '200': content: application/json: - example: - data: - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: staging - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - name: Web server is down; all requests are failing. 
- services: - - shopist - severity: High - started_at: 1693491974000000000 - team: backend - id: 4242fcdd31586085 - type: dora_failure - schema: - $ref: '#/components/schemas/DORAFetchResponse' + schema: + $ref: '#/components/schemas/DORAFailureFetchResponse' description: OK '400': content: diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 2e859a4f0f..56a17499ab 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -7214,6 +7214,27 @@ datadog\_api\_client.v2.model.domain\_allowlist\_type module :members: :show-inheritance: +datadog\_api\_client.v2.model.dora\_deployment\_fetch\_response module +---------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.dora_deployment_fetch_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.dora\_deployment\_object module +------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.dora_deployment_object + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.dora\_deployment\_object\_attributes module +------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.dora_deployment_object_attributes + :members: + :show-inheritance: + datadog\_api\_client.v2.model.dora\_deployment\_request module -------------------------------------------------------------- @@ -7256,10 +7277,17 @@ datadog\_api\_client.v2.model.dora\_deployment\_type module :members: :show-inheritance: -datadog\_api\_client.v2.model.dora\_event module ------------------------------------------------- +datadog\_api\_client.v2.model.dora\_deployments\_list\_response module +---------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.dora_event +.. automodule:: datadog_api_client.v2.model.dora_deployments_list_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.dora\_failure\_fetch\_response module +------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.dora_failure_fetch_response :members: :show-inheritance: @@ -7305,10 +7333,10 @@ datadog\_api\_client.v2.model.dora\_failure\_type module :members: :show-inheritance: -datadog\_api\_client.v2.model.dora\_fetch\_response module ----------------------------------------------------------- +datadog\_api\_client.v2.model.dora\_failures\_list\_response module +------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.dora_fetch_response +.. automodule:: datadog_api_client.v2.model.dora_failures_list_response :members: :show-inheritance: @@ -7319,6 +7347,20 @@ datadog\_api\_client.v2.model.dora\_git\_info module :members: :show-inheritance: +datadog\_api\_client.v2.model.dora\_incident\_object module +----------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.dora_incident_object + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.dora\_incident\_object\_attributes module +----------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.dora_incident_object_attributes + :members: + :show-inheritance: + datadog\_api\_client.v2.model.dora\_list\_deployments\_request module --------------------------------------------------------------------- @@ -7375,13 +7417,6 @@ datadog\_api\_client.v2.model.dora\_list\_failures\_request\_data\_type module :members: :show-inheritance: -datadog\_api\_client.v2.model.dora\_list\_response module ---------------------------------------------------------- - -.. automodule:: datadog_api_client.v2.model.dora_list_response - :members: - :show-inheritance: - datadog\_api\_client.v2.model.downtime\_create\_request module -------------------------------------------------------------- @@ -15467,6 +15502,34 @@ datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options module +----------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options\_disk\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options\_memory\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options\_when\_full module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options_when_full + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -15656,6 +15719,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_dedupe\_processor\_type m :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_disk\_buffer\_options module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_disk_buffer_options + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination module ---------------------------------------------------------------------------------------- @@ -16020,6 +16090,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_logstash\_source\_type mo :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_memory\_buffer\_options module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_memory_buffer_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_memory\_buffer\_size\_options module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_metadata\_entry module ----------------------------------------------------------------------------- diff --git a/src/datadog_api_client/v2/api/dora_metrics_api.py b/src/datadog_api_client/v2/api/dora_metrics_api.py index 6a9b219cc8..31f81aa5b4 100644 --- a/src/datadog_api_client/v2/api/dora_metrics_api.py +++ b/src/datadog_api_client/v2/api/dora_metrics_api.py @@ -10,12 +10,14 @@ from datadog_api_client.configuration import Configuration from datadog_api_client.v2.model.dora_deployment_response import DORADeploymentResponse from datadog_api_client.v2.model.dora_deployment_request import DORADeploymentRequest -from datadog_api_client.v2.model.dora_list_response import DORAListResponse +from datadog_api_client.v2.model.dora_deployments_list_response import DORADeploymentsListResponse from datadog_api_client.v2.model.dora_list_deployments_request import DORAListDeploymentsRequest -from datadog_api_client.v2.model.dora_fetch_response import DORAFetchResponse +from datadog_api_client.v2.model.dora_deployment_fetch_response import DORADeploymentFetchResponse from datadog_api_client.v2.model.dora_failure_response import DORAFailureResponse from datadog_api_client.v2.model.dora_failure_request import DORAFailureRequest +from datadog_api_client.v2.model.dora_failures_list_response import DORAFailuresListResponse from datadog_api_client.v2.model.dora_list_failures_request import DORAListFailuresRequest +from datadog_api_client.v2.model.dora_failure_fetch_response import DORAFailureFetchResponse class DORAMetricsApi: @@ -138,7 +140,7 @@ def __init__(self, api_client=None): self._get_dora_deployment_endpoint = _Endpoint( settings={ - "response_type": (DORAFetchResponse,), + "response_type": (DORADeploymentFetchResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], "endpoint_path": "/api/v2/dora/deployments/{deployment_id}", "operation_id": "get_dora_deployment", @@ -161,7 +163,7 @@ def __init__(self, api_client=None): self._get_dora_failure_endpoint = _Endpoint( settings={ - "response_type": (DORAFetchResponse,), + "response_type": (DORAFailureFetchResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], "endpoint_path": "/api/v2/dora/failures/{failure_id}", "operation_id": "get_dora_failure", @@ -184,7 +186,7 @@ def __init__(self, api_client=None): self._list_dora_deployments_endpoint = _Endpoint( settings={ - "response_type": (DORAListResponse,), + "response_type": (DORADeploymentsListResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], "endpoint_path": "/api/v2/dora/deployments", "operation_id": "list_dora_deployments", @@ -204,7 +206,7 @@ def __init__(self, api_client=None): self._list_dora_failures_endpoint = _Endpoint( settings={ - "response_type": (DORAListResponse,), + "response_type": (DORAFailuresListResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], "endpoint_path": "/api/v2/dora/failures", "operation_id": "list_dora_failures", @@ -326,14 +328,14 @@ def delete_dora_failure( def get_dora_deployment( self, deployment_id: str, - ) -> DORAFetchResponse: + ) -> DORADeploymentFetchResponse: """Get a deployment event. Use this API endpoint to get a deployment event. :param deployment_id: The ID of the deployment event. 
:type deployment_id: str - :rtype: DORAFetchResponse + :rtype: DORADeploymentFetchResponse """ kwargs: Dict[str, Any] = {} kwargs["deployment_id"] = deployment_id @@ -343,14 +345,14 @@ def get_dora_deployment( def get_dora_failure( self, failure_id: str, - ) -> DORAFetchResponse: + ) -> DORAFailureFetchResponse: """Get a failure event. Use this API endpoint to get a failure event. :param failure_id: The ID of the failure event. :type failure_id: str - :rtype: DORAFetchResponse + :rtype: DORAFailureFetchResponse """ kwargs: Dict[str, Any] = {} kwargs["failure_id"] = failure_id @@ -360,13 +362,13 @@ def get_dora_failure( def list_dora_deployments( self, body: DORAListDeploymentsRequest, - ) -> DORAListResponse: + ) -> DORADeploymentsListResponse: """Get a list of deployment events. Use this API endpoint to get a list of deployment events. :type body: DORAListDeploymentsRequest - :rtype: DORAListResponse + :rtype: DORADeploymentsListResponse """ kwargs: Dict[str, Any] = {} kwargs["body"] = body @@ -376,13 +378,13 @@ def list_dora_deployments( def list_dora_failures( self, body: DORAListFailuresRequest, - ) -> DORAListResponse: + ) -> DORAFailuresListResponse: """Get a list of failure events. Use this API endpoint to get a list of failure events. :type body: DORAListFailuresRequest - :rtype: DORAListResponse + :rtype: DORAFailuresListResponse """ kwargs: Dict[str, Any] = {} kwargs["body"] = body diff --git a/src/datadog_api_client/v2/model/azure_storage_destination.py b/src/datadog_api_client/v2/model/azure_storage_destination.py index 283e2df4aa..11c78496d6 100644 --- a/src/datadog_api_client/v2/model/azure_storage_destination.py +++ b/src/datadog_api_client/v2/model/azure_storage_destination.py @@ -14,16 +14,28 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class AzureStorageDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType return { "blob_prefix": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "container_name": (str,), "id": (str,), "inputs": ([str],), @@ -32,6 +44,7 @@ def openapi_types(_): attribute_map = { "blob_prefix": "blob_prefix", + "buffer": "buffer", "container_name": "container_name", "id": "id", "inputs": "inputs", @@ -45,6 +58,13 @@ def __init__( inputs: List[str], type: AzureStorageDestinationType, blob_prefix: Union[str, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ @@ -53,6 +73,9 @@ def __init__( :param blob_prefix: Optional prefix for blobs written to the container. 
:type blob_prefix: str, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param container_name: The name of the Azure Blob Storage container to store logs in. :type container_name: str @@ -67,6 +90,8 @@ def __init__( """ if blob_prefix is not unset: kwargs["blob_prefix"] = blob_prefix + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.container_name = container_name diff --git a/src/datadog_api_client/v2/model/dora_deployment_fetch_response.py b/src/datadog_api_client/v2/model/dora_deployment_fetch_response.py new file mode 100644 index 0000000000..4ebb411025 --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_deployment_fetch_response.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_deployment_object import DORADeploymentObject + + +class DORADeploymentFetchResponse(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_deployment_object import DORADeploymentObject + + return { + "data": (DORADeploymentObject,), + } + + attribute_map = { + "data": "data", + } + + def __init__(self_, data: Union[DORADeploymentObject, UnsetType] = unset, **kwargs): + """ + Response for fetching a single deployment event. + + :param data: A DORA deployment event. + :type data: DORADeploymentObject, optional + """ + if data is not unset: + kwargs["data"] = data + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/dora_deployment_object.py b/src/datadog_api_client/v2/model/dora_deployment_object.py new file mode 100644 index 0000000000..ba8734cf96 --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_deployment_object.py @@ -0,0 +1,64 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_deployment_object_attributes import DORADeploymentObjectAttributes + from datadog_api_client.v2.model.dora_deployment_type import DORADeploymentType + + +class DORADeploymentObject(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_deployment_object_attributes import DORADeploymentObjectAttributes + from datadog_api_client.v2.model.dora_deployment_type import DORADeploymentType + + return { + "attributes": (DORADeploymentObjectAttributes,), + "id": (str,), + "type": (DORADeploymentType,), + } + + attribute_map = { + "attributes": "attributes", + "id": "id", + "type": "type", + } + + def __init__( + self_, + attributes: Union[DORADeploymentObjectAttributes, UnsetType] = unset, + id: Union[str, UnsetType] = unset, + type: Union[DORADeploymentType, UnsetType] = unset, + **kwargs, + ): + """ + A DORA deployment event. + + :param attributes: The attributes of the deployment event. + :type attributes: DORADeploymentObjectAttributes, optional + + :param id: The ID of the deployment event. + :type id: str, optional + + :param type: JSON:API type for DORA deployment events. + :type type: DORADeploymentType, optional + """ + if attributes is not unset: + kwargs["attributes"] = attributes + if id is not unset: + kwargs["id"] = id + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/dora_deployment_object_attributes.py b/src/datadog_api_client/v2/model/dora_deployment_object_attributes.py new file mode 100644 index 0000000000..3fa4b7eb07 --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_deployment_object_attributes.py @@ -0,0 +1,101 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + none_type, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_git_info import DORAGitInfo + + +class DORADeploymentObjectAttributes(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_git_info import DORAGitInfo + + return { + "custom_tags": ([str],), + "env": (str,), + "finished_at": (int,), + "git": (DORAGitInfo,), + "service": (str,), + "started_at": (int,), + "team": (str,), + "version": (str,), + } + + attribute_map = { + "custom_tags": "custom_tags", + "env": "env", + "finished_at": "finished_at", + "git": "git", + "service": "service", + "started_at": "started_at", + "team": "team", + "version": "version", + } + + def __init__( + self_, + finished_at: int, + service: str, + started_at: int, + custom_tags: Union[List[str], none_type, UnsetType] = unset, + env: Union[str, UnsetType] = unset, + git: Union[DORAGitInfo, UnsetType] = unset, + team: Union[str, UnsetType] = unset, + version: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The attributes of the deployment event. + + :param custom_tags: A list of user-defined tags. The tags must follow the ``key:value`` pattern. Up to 100 may be added per event. 
+ :type custom_tags: [str], none_type, optional + + :param env: Environment name to where the service was deployed. + :type env: str, optional + + :param finished_at: Unix timestamp when the deployment finished. + :type finished_at: int + + :param git: Git info for DORA Metrics events. + :type git: DORAGitInfo, optional + + :param service: Service name. + :type service: str + + :param started_at: Unix timestamp when the deployment started. + :type started_at: int + + :param team: Name of the team owning the deployed service. + :type team: str, optional + + :param version: Version to correlate with APM Deployment Tracking. + :type version: str, optional + """ + if custom_tags is not unset: + kwargs["custom_tags"] = custom_tags + if env is not unset: + kwargs["env"] = env + if git is not unset: + kwargs["git"] = git + if team is not unset: + kwargs["team"] = team + if version is not unset: + kwargs["version"] = version + super().__init__(kwargs) + + self_.finished_at = finished_at + self_.service = service + self_.started_at = started_at diff --git a/src/datadog_api_client/v2/model/dora_deployments_list_response.py b/src/datadog_api_client/v2/model/dora_deployments_list_response.py new file mode 100644 index 0000000000..d454fdecd0 --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_deployments_list_response.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_deployment_object import DORADeploymentObject + + +class DORADeploymentsListResponse(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_deployment_object import DORADeploymentObject + + return { + "data": ([DORADeploymentObject],), + } + + attribute_map = { + "data": "data", + } + + def __init__(self_, data: Union[List[DORADeploymentObject], UnsetType] = unset, **kwargs): + """ + Response for the list deployments endpoint. + + :param data: The list of DORA deployment events. + :type data: [DORADeploymentObject], optional + """ + if data is not unset: + kwargs["data"] = data + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/dora_event.py b/src/datadog_api_client/v2/model/dora_event.py deleted file mode 100644 index 9f00b76800..0000000000 --- a/src/datadog_api_client/v2/model/dora_event.py +++ /dev/null @@ -1,56 +0,0 @@ -# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. -# This product includes software developed at Datadog (https://www.datadoghq.com/). -# Copyright 2019-Present Datadog, Inc. 
-from __future__ import annotations - -from typing import Union - -from datadog_api_client.model_utils import ( - ModelNormal, - cached_property, - unset, - UnsetType, -) - - -class DORAEvent(ModelNormal): - @cached_property - def openapi_types(_): - return { - "attributes": (dict,), - "id": (str,), - "type": (str,), - } - - attribute_map = { - "attributes": "attributes", - "id": "id", - "type": "type", - } - - def __init__( - self_, - attributes: Union[dict, UnsetType] = unset, - id: Union[str, UnsetType] = unset, - type: Union[str, UnsetType] = unset, - **kwargs, - ): - """ - A DORA event. - - :param attributes: The attributes of the event. - :type attributes: dict, optional - - :param id: The ID of the event. - :type id: str, optional - - :param type: The type of the event. - :type type: str, optional - """ - if attributes is not unset: - kwargs["attributes"] = attributes - if id is not unset: - kwargs["id"] = id - if type is not unset: - kwargs["type"] = type - super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/dora_fetch_response.py b/src/datadog_api_client/v2/model/dora_failure_fetch_response.py similarity index 58% rename from src/datadog_api_client/v2/model/dora_fetch_response.py rename to src/datadog_api_client/v2/model/dora_failure_fetch_response.py index a0f42842d7..3d1e5369cd 100644 --- a/src/datadog_api_client/v2/model/dora_fetch_response.py +++ b/src/datadog_api_client/v2/model/dora_failure_fetch_response.py @@ -14,28 +14,28 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.dora_event import DORAEvent + from datadog_api_client.v2.model.dora_incident_object import DORAIncidentObject -class DORAFetchResponse(ModelNormal): +class DORAFailureFetchResponse(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.dora_event import DORAEvent + from datadog_api_client.v2.model.dora_incident_object import DORAIncidentObject return { - "data": (DORAEvent,), + "data": (DORAIncidentObject,), } attribute_map = { "data": "data", } - def __init__(self_, data: Union[DORAEvent, UnsetType] = unset, **kwargs): + def __init__(self_, data: Union[DORAIncidentObject, UnsetType] = unset, **kwargs): """ - Response for the DORA fetch endpoints. + Response for fetching a single failure event. - :param data: A DORA event. - :type data: DORAEvent, optional + :param data: A DORA incident event. 
+ :type data: DORAIncidentObject, optional """ if data is not unset: kwargs["data"] = data diff --git a/src/datadog_api_client/v2/model/dora_list_response.py b/src/datadog_api_client/v2/model/dora_failures_list_response.py similarity index 57% rename from src/datadog_api_client/v2/model/dora_list_response.py rename to src/datadog_api_client/v2/model/dora_failures_list_response.py index d98199af21..2290435b71 100644 --- a/src/datadog_api_client/v2/model/dora_list_response.py +++ b/src/datadog_api_client/v2/model/dora_failures_list_response.py @@ -14,28 +14,28 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.dora_event import DORAEvent + from datadog_api_client.v2.model.dora_incident_object import DORAIncidentObject -class DORAListResponse(ModelNormal): +class DORAFailuresListResponse(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.dora_event import DORAEvent + from datadog_api_client.v2.model.dora_incident_object import DORAIncidentObject return { - "data": ([DORAEvent],), + "data": ([DORAIncidentObject],), } attribute_map = { "data": "data", } - def __init__(self_, data: Union[List[DORAEvent], UnsetType] = unset, **kwargs): + def __init__(self_, data: Union[List[DORAIncidentObject], UnsetType] = unset, **kwargs): """ - Response for the DORA list endpoints. + Response for the list failures endpoint. - :param data: The list of DORA events. - :type data: [DORAEvent], optional + :param data: The list of DORA incident events. + :type data: [DORAIncidentObject], optional """ if data is not unset: kwargs["data"] = data diff --git a/src/datadog_api_client/v2/model/dora_incident_object.py b/src/datadog_api_client/v2/model/dora_incident_object.py new file mode 100644 index 0000000000..db99b44d0e --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_incident_object.py @@ -0,0 +1,64 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_incident_object_attributes import DORAIncidentObjectAttributes + from datadog_api_client.v2.model.dora_failure_type import DORAFailureType + + +class DORAIncidentObject(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_incident_object_attributes import DORAIncidentObjectAttributes + from datadog_api_client.v2.model.dora_failure_type import DORAFailureType + + return { + "attributes": (DORAIncidentObjectAttributes,), + "id": (str,), + "type": (DORAFailureType,), + } + + attribute_map = { + "attributes": "attributes", + "id": "id", + "type": "type", + } + + def __init__( + self_, + attributes: Union[DORAIncidentObjectAttributes, UnsetType] = unset, + id: Union[str, UnsetType] = unset, + type: Union[DORAFailureType, UnsetType] = unset, + **kwargs, + ): + """ + A DORA incident event. + + :param attributes: The attributes of the incident event. + :type attributes: DORAIncidentObjectAttributes, optional + + :param id: The ID of the incident event. + :type id: str, optional + + :param type: JSON:API type for DORA failure events. 
+ :type type: DORAFailureType, optional + """ + if attributes is not unset: + kwargs["attributes"] = attributes + if id is not unset: + kwargs["id"] = id + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/dora_incident_object_attributes.py b/src/datadog_api_client/v2/model/dora_incident_object_attributes.py new file mode 100644 index 0000000000..1e302f6f5e --- /dev/null +++ b/src/datadog_api_client/v2/model/dora_incident_object_attributes.py @@ -0,0 +1,119 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + none_type, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.dora_git_info import DORAGitInfo + + +class DORAIncidentObjectAttributes(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.dora_git_info import DORAGitInfo + + return { + "custom_tags": ([str],), + "env": (str,), + "finished_at": (int,), + "git": (DORAGitInfo,), + "name": (str,), + "services": ([str],), + "severity": (str,), + "started_at": (int,), + "team": (str,), + "version": (str,), + } + + attribute_map = { + "custom_tags": "custom_tags", + "env": "env", + "finished_at": "finished_at", + "git": "git", + "name": "name", + "services": "services", + "severity": "severity", + "started_at": "started_at", + "team": "team", + "version": "version", + } + + def __init__( + self_, + started_at: int, + custom_tags: Union[List[str], none_type, UnsetType] = unset, + env: Union[str, UnsetType] = unset, + finished_at: Union[int, UnsetType] = unset, + git: Union[DORAGitInfo, UnsetType] = unset, + name: Union[str, UnsetType] = unset, + services: Union[List[str], UnsetType] = unset, + severity: Union[str, UnsetType] = unset, + team: Union[str, UnsetType] = unset, + version: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The attributes of the incident event. + + :param custom_tags: A list of user-defined tags. The tags must follow the ``key:value`` pattern. Up to 100 may be added per event. + :type custom_tags: [str], none_type, optional + + :param env: Environment name that was impacted by the incident. + :type env: str, optional + + :param finished_at: Unix timestamp when the incident finished. + :type finished_at: int, optional + + :param git: Git info for DORA Metrics events. + :type git: DORAGitInfo, optional + + :param name: Incident name. + :type name: str, optional + + :param services: Service names impacted by the incident. + :type services: [str], optional + + :param severity: Incident severity. + :type severity: str, optional + + :param started_at: Unix timestamp when the incident started. + :type started_at: int + + :param team: Name of the team owning the services impacted. + :type team: str, optional + + :param version: Version to correlate with APM Deployment Tracking. 
+ :type version: str, optional + """ + if custom_tags is not unset: + kwargs["custom_tags"] = custom_tags + if env is not unset: + kwargs["env"] = env + if finished_at is not unset: + kwargs["finished_at"] = finished_at + if git is not unset: + kwargs["git"] = git + if name is not unset: + kwargs["name"] = name + if services is not unset: + kwargs["services"] = services + if severity is not unset: + kwargs["severity"] = severity + if team is not unset: + kwargs["team"] = team + if version is not unset: + kwargs["version"] = version + super().__init__(kwargs) + + self_.started_at = started_at diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py index 44868adb77..1d4d005b60 100644 --- a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py @@ -3,24 +3,38 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class MicrosoftSentinelDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType return { + "buffer": (ObservabilityPipelineBufferOptions,), "client_id": (str,), "dcr_immutable_id": (str,), "id": (str,), @@ -31,6 +45,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "client_id": "client_id", "dcr_immutable_id": "dcr_immutable_id", "id": "id", @@ -49,11 +64,21 @@ def __init__( table: str, tenant_id: str, type: MicrosoftSentinelDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``microsoft_sentinel`` destination forwards logs to Microsoft Sentinel. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param client_id: Azure AD client ID used for authentication. :type client_id: str @@ -75,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``microsoft_sentinel``. 
:type type: MicrosoftSentinelDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.client_id = client_id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py index 639d156f8b..518ff59fc3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py @@ -17,9 +17,19 @@ from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( ObservabilityPipelineAmazonOpenSearchDestinationAuth, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( ObservabilityPipelineAmazonOpenSearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineAmazonOpenSearchDestination(ModelNormal): @@ -28,12 +38,14 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( ObservabilityPipelineAmazonOpenSearchDestinationAuth, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( ObservabilityPipelineAmazonOpenSearchDestinationType, ) return { "auth": (ObservabilityPipelineAmazonOpenSearchDestinationAuth,), + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -42,6 +54,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -54,6 +67,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineAmazonOpenSearchDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): @@ -64,6 +84,9 @@ def __init__( The ``strategy`` field determines whether basic or AWS-based authentication is used. :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to. :type bulk_index: str, optional @@ -76,6 +99,8 @@ def __init__( :param type: The destination type. The value should always be ``amazon_opensearch``. 
:type type: ObservabilityPipelineAmazonOpenSearchDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py index 1a7af99838..99c74ad3fe 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py @@ -15,6 +15,7 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( ObservabilityPipelineAmazonS3DestinationStorageClass, ) @@ -22,12 +23,22 @@ from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type import ( ObservabilityPipelineAmazonS3DestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineAmazonS3Destination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( ObservabilityPipelineAmazonS3DestinationStorageClass, ) @@ -39,6 +50,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineAwsAuth,), "bucket": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "key_prefix": (str,), @@ -51,6 +63,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", "bucket": "bucket", + "buffer": "buffer", "id": "id", "inputs": "inputs", "key_prefix": "key_prefix", @@ -69,6 +82,13 @@ def __init__( storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass, type: ObservabilityPipelineAmazonS3DestinationType, auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, key_prefix: Union[str, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -83,6 +103,9 @@ def __init__( :param bucket: S3 bucket name. :type bucket: str + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: Unique identifier for the destination component. 
:type id: str @@ -106,6 +129,8 @@ def __init__( """ if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if key_prefix is not unset: kwargs["key_prefix"] = key_prefix if tls is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py index 6757c5f25c..f4cc4c4b9e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py @@ -15,16 +15,27 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination_type import ( ObservabilityPipelineAmazonSecurityLakeDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineAmazonSecurityLakeDestination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination_type import ( ObservabilityPipelineAmazonSecurityLakeDestinationType, @@ -33,6 +44,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineAwsAuth,), "bucket": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "custom_source_name": (str,), "id": (str,), "inputs": ([str],), @@ -44,6 +56,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", "bucket": "bucket", + "buffer": "buffer", "custom_source_name": "custom_source_name", "id": "id", "inputs": "inputs", @@ -61,6 +74,13 @@ def __init__( region: str, type: ObservabilityPipelineAmazonSecurityLakeDestinationType, auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): @@ -74,6 +94,9 @@ def __init__( :param bucket: Name of the Amazon S3 bucket in Security Lake (3-63 characters). :type bucket: str + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param custom_source_name: Custom source name for the logs in Security Lake. 
:type custom_source_name: str @@ -94,6 +117,8 @@ def __init__( """ if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if tls is not unset: kwargs["tls"] = tls super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py new file mode 100644 index 0000000000..2802e1e456 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py @@ -0,0 +1,57 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelComposed, + cached_property, +) + + +class ObservabilityPipelineBufferOptions(ModelComposed): + def __init__(self, **kwargs): + """ + Configuration for buffer settings on destination components. + + :param max_size: Maximum size of the disk buffer. + :type max_size: int, optional + + :param type: The type of the buffer that will be configured, a disk buffer. + :type type: ObservabilityPipelineBufferOptionsDiskType, optional + + :param when_full: Behavior when the buffer is full (block and stop accepting new events, or drop new events) + :type when_full: ObservabilityPipelineBufferOptionsWhenFull, optional + + :param max_events: Maximum events for the memory buffer. + :type max_events: int, optional + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) + + return { + "oneOf": [ + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py new file mode 100644 index 0000000000..f3eb53231d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineBufferOptionsDiskType(ModelSimple): + """ + The type of the buffer that will be configured, a disk buffer. 
+ + :param value: If omitted defaults to "disk". Must be one of ["disk"]. + :type value: str + """ + + allowed_values = { + "disk", + } + DISK: ClassVar["ObservabilityPipelineBufferOptionsDiskType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineBufferOptionsDiskType.DISK = ObservabilityPipelineBufferOptionsDiskType("disk") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py new file mode 100644 index 0000000000..3c51d5c1b1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineBufferOptionsMemoryType(ModelSimple): + """ + The type of the buffer that will be configured, a memory buffer. + + :param value: If omitted defaults to "memory". Must be one of ["memory"]. + :type value: str + """ + + allowed_values = { + "memory", + } + MEMORY: ClassVar["ObservabilityPipelineBufferOptionsMemoryType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineBufferOptionsMemoryType.MEMORY = ObservabilityPipelineBufferOptionsMemoryType("memory") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_when_full.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_when_full.py new file mode 100644 index 0000000000..97312c012d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_when_full.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineBufferOptionsWhenFull(ModelSimple): + """ + Behavior when the buffer is full (block and stop accepting new events, or drop new events) + + :param value: If omitted defaults to "block". Must be one of ["block", "drop_newest"]. 
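Taken together, the enums above and the ObservabilityPipelineDiskBufferOptions model defined later in this patch let a disk buffer be built roughly as follows; the size value is illustrative, since the schema excerpt shown here does not state its unit.

from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import (
    ObservabilityPipelineBufferOptionsDiskType,
)
from datadog_api_client.v2.model.observability_pipeline_buffer_options_when_full import (
    ObservabilityPipelineBufferOptionsWhenFull,
)
from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import (
    ObservabilityPipelineDiskBufferOptions,
)

# A disk buffer that blocks upstream components once it reaches its size limit.
# The ModelSimple enums expose their allowed values as class-level constants,
# so the raw strings "disk" and "block" never need to be spelled out.
disk_buffer = ObservabilityPipelineDiskBufferOptions(
    max_size=1048576,  # illustrative value; the schema excerpt does not specify the unit
    type=ObservabilityPipelineBufferOptionsDiskType.DISK,
    when_full=ObservabilityPipelineBufferOptionsWhenFull.BLOCK,
)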
+ :type value: str + """ + + allowed_values = { + "block", + "drop_newest", + } + BLOCK: ClassVar["ObservabilityPipelineBufferOptionsWhenFull"] + DROP_NEWEST: ClassVar["ObservabilityPipelineBufferOptionsWhenFull"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineBufferOptionsWhenFull.BLOCK = ObservabilityPipelineBufferOptionsWhenFull("block") +ObservabilityPipelineBufferOptionsWhenFull.DROP_NEWEST = ObservabilityPipelineBufferOptionsWhenFull("drop_newest") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index df018bec4e..89b866deed 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -15,6 +15,9 @@ def __init__(self, **kwargs): """ A destination for the pipeline. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py index d1dbc9facf..6e1681c416 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py @@ -14,6 +14,7 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination_compression import ( ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, ) @@ -24,11 +25,21 @@ from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination_type import ( ObservabilityPipelineCrowdStrikeNextGenSiemDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineCrowdStrikeNextGenSiemDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination_compression import ( ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, ) @@ -41,6 +52,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "compression": (ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression,), "encoding": (ObservabilityPipelineCrowdStrikeNextGenSiemDestinationEncoding,), "id": (str,), @@ -50,6 +62,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "compression": "compression", "encoding": "encoding", "id": "id", @@ -64,6 +77,13 @@ def __init__( id: str, inputs: List[str], type: 
ObservabilityPipelineCrowdStrikeNextGenSiemDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, compression: Union[ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -71,6 +91,9 @@ def __init__( """ The ``crowdstrike_next_gen_siem`` destination forwards logs to CrowdStrike Next Gen SIEM. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param compression: Compression configuration for log events. :type compression: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, optional @@ -89,6 +112,8 @@ def __init__( :param type: The destination type. The value should always be ``crowdstrike_next_gen_siem``. :type type: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if compression is not unset: kwargs["compression"] = compression if tls is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py index b519b48735..5554a4a554 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py @@ -3,43 +3,74 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineDatadogLogsDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "type": (ObservabilityPipelineDatadogLogsDestinationType,), } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "type": "type", } - def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatadogLogsDestinationType, **kwargs): + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineDatadogLogsDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + 
ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, + **kwargs, + ): """ The ``datadog_logs`` destination forwards logs to Datadog Log Management. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -49,6 +80,8 @@ def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatad :param type: The destination type. The value should always be ``datadog_logs``. :type type: ObservabilityPipelineDatadogLogsDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py new file mode 100644 index 0000000000..5bcfc2b8ad --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py @@ -0,0 +1,72 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, + ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options_when_full import ( + ObservabilityPipelineBufferOptionsWhenFull, + ) + + +class ObservabilityPipelineDiskBufferOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, + ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options_when_full import ( + ObservabilityPipelineBufferOptionsWhenFull, + ) + + return { + "max_size": (int,), + "type": (ObservabilityPipelineBufferOptionsDiskType,), + "when_full": (ObservabilityPipelineBufferOptionsWhenFull,), + } + + attribute_map = { + "max_size": "max_size", + "type": "type", + "when_full": "when_full", + } + + def __init__( + self_, + max_size: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsDiskType, UnsetType] = unset, + when_full: Union[ObservabilityPipelineBufferOptionsWhenFull, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a disk buffer. + + :param max_size: Maximum size of the disk buffer. + :type max_size: int, optional + + :param type: The type of the buffer that will be configured, a disk buffer. 
+ :type type: ObservabilityPipelineBufferOptionsDiskType, optional + + :param when_full: Behavior when the buffer is full (block and stop accepting new events, or drop new events) + :type when_full: ObservabilityPipelineBufferOptionsWhenFull, optional + """ + if max_size is not unset: + kwargs["max_size"] = max_size + if type is not unset: + kwargs["type"] = type + if when_full is not unset: + kwargs["when_full"] = when_full + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py index 8ce413b7ef..0689f64e11 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py @@ -17,9 +17,19 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineElasticsearchDestination(ModelNormal): @@ -28,12 +38,14 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) return { "api_version": (ObservabilityPipelineElasticsearchDestinationApiVersion,), + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -42,6 +54,7 @@ def openapi_types(_): attribute_map = { "api_version": "api_version", + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -54,6 +67,13 @@ def __init__( inputs: List[str], type: ObservabilityPipelineElasticsearchDestinationType, api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): @@ -63,6 +83,9 @@ def __init__( :param api_version: The Elasticsearch API version to use. Set to ``auto`` to auto-detect. :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to in Elasticsearch. 
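Any destination that gains a buffer parameter in this patch accepts such a buffer directly. Reusing the disk_buffer sketched above, the datadog_logs destination (whose updated constructor appears earlier in this diff) would be configured roughly as follows; the DATADOG_LOGS constant is assumed to follow the same ClassVar pattern as the buffer enums, and the component IDs are illustrative.

from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import (
    ObservabilityPipelineDatadogLogsDestination,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import (
    ObservabilityPipelineDatadogLogsDestinationType,
)

destination = ObservabilityPipelineDatadogLogsDestination(
    id="datadog-logs-destination",  # illustrative component id
    inputs=["my-processor"],        # illustrative upstream component ids
    type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS,  # assumed constant name
    buffer=disk_buffer,             # new in this patch: optional buffer configuration
)

Because buffer is typed as the ObservabilityPipelineBufferOptions oneOf, the memory-based variants defined further down in this patch can be passed in the same position.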
:type bulk_index: str, optional @@ -77,6 +100,8 @@ def __init__( """ if api_version is not unset: kwargs["api_version"] = api_version + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py index 964a09e959..c338fc8f37 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py @@ -15,18 +15,29 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( ObservabilityPipelineGoogleChronicleDestinationEncoding, ) from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type import ( ObservabilityPipelineGoogleChronicleDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineGoogleChronicleDestination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( ObservabilityPipelineGoogleChronicleDestinationEncoding, ) @@ -36,6 +47,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineGcpAuth,), + "buffer": (ObservabilityPipelineBufferOptions,), "customer_id": (str,), "encoding": (ObservabilityPipelineGoogleChronicleDestinationEncoding,), "id": (str,), @@ -46,6 +58,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", + "buffer": "buffer", "customer_id": "customer_id", "encoding": "encoding", "id": "id", @@ -61,6 +74,13 @@ def __init__( inputs: List[str], type: ObservabilityPipelineGoogleChronicleDestinationType, auth: Union[ObservabilityPipelineGcpAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineGoogleChronicleDestinationEncoding, UnsetType] = unset, log_type: Union[str, UnsetType] = unset, **kwargs, @@ -71,6 +91,9 @@ def __init__( :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param customer_id: The Google Chronicle customer ID. 
:type customer_id: str @@ -91,6 +114,8 @@ def __init__( """ if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if log_type is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py index a2e8b0c337..c3ae6df5a8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py @@ -18,6 +18,7 @@ ObservabilityPipelineGoogleCloudStorageDestinationAcl, ) from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, @@ -25,6 +26,15 @@ from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type import ( ObservabilityPipelineGoogleCloudStorageDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineGoogleCloudStorageDestination(ModelNormal): @@ -34,6 +44,7 @@ def openapi_types(_): ObservabilityPipelineGoogleCloudStorageDestinationAcl, ) from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, @@ -46,6 +57,7 @@ def openapi_types(_): "acl": (ObservabilityPipelineGoogleCloudStorageDestinationAcl,), "auth": (ObservabilityPipelineGcpAuth,), "bucket": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "key_prefix": (str,), @@ -58,6 +70,7 @@ def openapi_types(_): "acl": "acl", "auth": "auth", "bucket": "bucket", + "buffer": "buffer", "id": "id", "inputs": "inputs", "key_prefix": "key_prefix", @@ -75,6 +88,13 @@ def __init__( type: ObservabilityPipelineGoogleCloudStorageDestinationType, acl: Union[ObservabilityPipelineGoogleCloudStorageDestinationAcl, UnsetType] = unset, auth: Union[ObservabilityPipelineGcpAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, key_prefix: Union[str, UnsetType] = unset, metadata: Union[List[ObservabilityPipelineMetadataEntry], UnsetType] = unset, **kwargs, @@ 
-92,6 +112,9 @@ def __init__( :param bucket: Name of the GCS bucket. :type bucket: str + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: Unique identifier for the destination component. :type id: str @@ -114,6 +137,8 @@ def __init__( kwargs["acl"] = acl if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if key_prefix is not unset: kwargs["key_prefix"] = key_prefix if metadata is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py index 5fc600f35e..47adceee9e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py @@ -15,6 +15,7 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination_encoding import ( ObservabilityPipelineGooglePubSubDestinationEncoding, ) @@ -22,12 +23,22 @@ from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination_type import ( ObservabilityPipelineGooglePubSubDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineGooglePubSubDestination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination_encoding import ( ObservabilityPipelineGooglePubSubDestinationEncoding, ) @@ -38,6 +49,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineGcpAuth,), + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineGooglePubSubDestinationEncoding,), "id": (str,), "inputs": ([str],), @@ -49,6 +61,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", + "buffer": "buffer", "encoding": "encoding", "id": "id", "inputs": "inputs", @@ -67,6 +80,13 @@ def __init__( topic: str, type: ObservabilityPipelineGooglePubSubDestinationType, auth: Union[ObservabilityPipelineGcpAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): @@ -76,6 +96,9 @@ def __init__( :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth, optional + :param buffer: Configuration for buffer settings on destination components. 
+ :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: Encoding format for log events. :type encoding: ObservabilityPipelineGooglePubSubDestinationEncoding @@ -99,6 +122,8 @@ def __init__( """ if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if tls is not unset: kwargs["tls"] = tls super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py new file mode 100644 index 0000000000..9a146cd6e6 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + +class ObservabilityPipelineMemoryBufferOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + return { + "max_size": (int,), + "type": (ObservabilityPipelineBufferOptionsMemoryType,), + } + + attribute_map = { + "max_size": "max_size", + "type": "type", + } + + def __init__( + self_, + max_size: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsMemoryType, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a memory buffer by byte size. + + :param max_size: Maximum size of the disk buffer. + :type max_size: int, optional + + :param type: The type of the buffer that will be configured, a memory buffer. + :type type: ObservabilityPipelineBufferOptionsMemoryType, optional + """ + if max_size is not unset: + kwargs["max_size"] = max_size + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py new file mode 100644 index 0000000000..3218ba0b19 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + +class ObservabilityPipelineMemoryBufferSizeOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + return { + "max_events": (int,), + "type": (ObservabilityPipelineBufferOptionsMemoryType,), + } + + attribute_map = { + "max_events": "max_events", + "type": "type", + } + + def __init__( + self_, + max_events: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsMemoryType, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a memory buffer by queue length. + + :param max_events: Maximum events for the memory buffer. + :type max_events: int, optional + + :param type: The type of the buffer that will be configured, a memory buffer. + :type type: ObservabilityPipelineBufferOptionsMemoryType, optional + """ + if max_events is not unset: + kwargs["max_events"] = max_events + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py index ec0cb39481..e437d0714a 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py @@ -3,26 +3,39 @@ # Copyright 2019-Present Datadog, Inc. 
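The two memory-backed variants added above differ only in how the limit is expressed: ObservabilityPipelineMemoryBufferOptions caps the buffer by byte size via max_size, while ObservabilityPipelineMemoryBufferSizeOptions caps it by queue length via max_events. A minimal sketch, with illustrative limits:

from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import (
    ObservabilityPipelineBufferOptionsMemoryType,
)
from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import (
    ObservabilityPipelineMemoryBufferOptions,
)
from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import (
    ObservabilityPipelineMemoryBufferSizeOptions,
)

# Memory buffer capped by size (illustrative limit; the schema excerpt does not state a unit).
by_size = ObservabilityPipelineMemoryBufferOptions(
    max_size=1048576,
    type=ObservabilityPipelineBufferOptionsMemoryType.MEMORY,
)

# Memory buffer capped by the number of queued events.
by_events = ObservabilityPipelineMemoryBufferSizeOptions(
    max_events=500,
    type=ObservabilityPipelineBufferOptionsMemoryType.MEMORY,
)

# Either model can be passed as the `buffer` argument on any destination updated in this patch.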
from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( ObservabilityPipelineNewRelicDestinationRegion, ) from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type import ( ObservabilityPipelineNewRelicDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineNewRelicDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( ObservabilityPipelineNewRelicDestinationRegion, ) @@ -31,6 +44,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "region": (ObservabilityPipelineNewRelicDestinationRegion,), @@ -38,6 +52,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "region": "region", @@ -50,11 +65,21 @@ def __init__( inputs: List[str], region: ObservabilityPipelineNewRelicDestinationRegion, type: ObservabilityPipelineNewRelicDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``new_relic`` destination sends logs to the New Relic platform. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -67,6 +92,8 @@ def __init__( :param type: The destination type. The value should always be ``new_relic``. 
:type type: ObservabilityPipelineNewRelicDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py index d817a4b558..be7b5e7730 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py @@ -14,19 +14,31 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineOpenSearchDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -34,6 +46,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -45,12 +58,22 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineOpenSearchDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): """ The ``opensearch`` destination writes logs to an OpenSearch cluster. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to. :type bulk_index: str, optional @@ -63,6 +86,8 @@ def __init__( :param type: The destination type. The value should always be ``opensearch``. 
:type type: ObservabilityPipelineOpenSearchDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py index 486442d4aa..a09ccda931 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py @@ -14,10 +14,20 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( ObservabilityPipelineRsyslogDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineRsyslogDestination(ModelNormal): @@ -29,12 +39,14 @@ class ObservabilityPipelineRsyslogDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( ObservabilityPipelineRsyslogDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "keepalive": (int,), @@ -43,6 +55,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "keepalive": "keepalive", @@ -55,6 +68,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineRsyslogDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, keepalive: Union[int, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -62,6 +82,9 @@ def __init__( """ The ``rsyslog`` destination forwards logs to an external ``rsyslog`` server over TCP or UDP using the syslog protocol. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -77,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``rsyslog``. 
:type type: ObservabilityPipelineRsyslogDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if keepalive is not unset: kwargs["keepalive"] = keepalive if tls is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py index 924aa9b0f5..d19c296e36 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py @@ -3,26 +3,39 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( ObservabilityPipelineSentinelOneDestinationRegion, ) from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type import ( ObservabilityPipelineSentinelOneDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSentinelOneDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( ObservabilityPipelineSentinelOneDestinationRegion, ) @@ -31,6 +44,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "region": (ObservabilityPipelineSentinelOneDestinationRegion,), @@ -38,6 +52,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "region": "region", @@ -50,11 +65,21 @@ def __init__( inputs: List[str], region: ObservabilityPipelineSentinelOneDestinationRegion, type: ObservabilityPipelineSentinelOneDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``sentinel_one`` destination sends logs to SentinelOne. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -67,6 +92,8 @@ def __init__( :param type: The destination type. The value should always be ``sentinel_one``. 
:type type: ObservabilityPipelineSentinelOneDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py index 8ff0d447f2..f1359ad670 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py @@ -14,6 +14,7 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_socket_destination_encoding import ( ObservabilityPipelineSocketDestinationEncoding, ) @@ -27,6 +28,15 @@ from datadog_api_client.v2.model.observability_pipeline_socket_destination_type import ( ObservabilityPipelineSocketDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) from datadog_api_client.v2.model.observability_pipeline_socket_destination_framing_newline_delimited import ( ObservabilityPipelineSocketDestinationFramingNewlineDelimited, ) @@ -41,6 +51,7 @@ class ObservabilityPipelineSocketDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_socket_destination_encoding import ( ObservabilityPipelineSocketDestinationEncoding, ) @@ -56,6 +67,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineSocketDestinationEncoding,), "framing": (ObservabilityPipelineSocketDestinationFraming,), "id": (str,), @@ -66,6 +78,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "encoding": "encoding", "framing": "framing", "id": "id", @@ -88,12 +101,22 @@ def __init__( inputs: List[str], mode: ObservabilityPipelineSocketDestinationMode, type: ObservabilityPipelineSocketDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): """ The ``socket`` destination sends logs over TCP or UDP to a remote server. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: Encoding format for log events. :type encoding: ObservabilityPipelineSocketDestinationEncoding @@ -115,6 +138,8 @@ def __init__( :param type: The destination type. The value should always be ``socket``. 
:type type: ObservabilityPipelineSocketDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if tls is not unset: kwargs["tls"] = tls super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py index e36b4aee74..49665d08f3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py @@ -14,17 +14,28 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( ObservabilityPipelineSplunkHecDestinationEncoding, ) from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type import ( ObservabilityPipelineSplunkHecDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSplunkHecDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( ObservabilityPipelineSplunkHecDestinationEncoding, ) @@ -34,6 +45,7 @@ def openapi_types(_): return { "auto_extract_timestamp": (bool,), + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineSplunkHecDestinationEncoding,), "id": (str,), "index": (str,), @@ -44,6 +56,7 @@ def openapi_types(_): attribute_map = { "auto_extract_timestamp": "auto_extract_timestamp", + "buffer": "buffer", "encoding": "encoding", "id": "id", "index": "index", @@ -58,6 +71,13 @@ def __init__( inputs: List[str], type: ObservabilityPipelineSplunkHecDestinationType, auto_extract_timestamp: Union[bool, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineSplunkHecDestinationEncoding, UnsetType] = unset, index: Union[str, UnsetType] = unset, sourcetype: Union[str, UnsetType] = unset, @@ -70,6 +90,9 @@ def __init__( If ``false`` , Splunk assigns the time the event was received. :type auto_extract_timestamp: bool, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: Encoding format for log events. 
:type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional @@ -90,6 +113,8 @@ def __init__( """ if auto_extract_timestamp is not unset: kwargs["auto_extract_timestamp"] = auto_extract_timestamp + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if index is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py index d49e3044f8..488c595631 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py @@ -14,6 +14,7 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( ObservabilityPipelineSumoLogicDestinationEncoding, ) @@ -23,11 +24,21 @@ from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type import ( ObservabilityPipelineSumoLogicDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSumoLogicDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( ObservabilityPipelineSumoLogicDestinationEncoding, ) @@ -39,6 +50,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineSumoLogicDestinationEncoding,), "header_custom_fields": ([ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem],), "header_host_name": (str,), @@ -50,6 +62,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "encoding": "encoding", "header_custom_fields": "header_custom_fields", "header_host_name": "header_host_name", @@ -65,6 +78,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineSumoLogicDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineSumoLogicDestinationEncoding, UnsetType] = unset, header_custom_fields: Union[ List[ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], UnsetType @@ -77,6 +97,9 @@ def __init__( """ The ``sumo_logic`` destination forwards logs to Sumo Logic. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: The output encoding format. :type encoding: ObservabilityPipelineSumoLogicDestinationEncoding, optional @@ -101,6 +124,8 @@ def __init__( :param type: The destination type. The value should always be ``sumo_logic``. 
:type type: ObservabilityPipelineSumoLogicDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if header_custom_fields is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py index 4984e69b5e..eb2a8c309d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py @@ -14,10 +14,20 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( ObservabilityPipelineSyslogNgDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSyslogNgDestination(ModelNormal): @@ -29,12 +39,14 @@ class ObservabilityPipelineSyslogNgDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( ObservabilityPipelineSyslogNgDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "keepalive": (int,), @@ -43,6 +55,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "keepalive": "keepalive", @@ -55,6 +68,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineSyslogNgDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, keepalive: Union[int, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -62,6 +82,9 @@ def __init__( """ The ``syslog_ng`` destination forwards logs to an external ``syslog-ng`` server over TCP or UDP using the syslog protocol. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -77,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``syslog_ng``. 
:type type: ObservabilityPipelineSyslogNgDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if keepalive is not unset: kwargs["keepalive"] = keepalive if tls is not unset: diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 39ee6ab446..8c693bff78 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -1305,21 +1305,27 @@ from datadog_api_client.v2.model.custom_framework_requirement import CustomFrameworkRequirement from datadog_api_client.v2.model.custom_framework_type import CustomFrameworkType from datadog_api_client.v2.model.custom_framework_without_requirements import CustomFrameworkWithoutRequirements +from datadog_api_client.v2.model.dora_deployment_fetch_response import DORADeploymentFetchResponse +from datadog_api_client.v2.model.dora_deployment_object import DORADeploymentObject +from datadog_api_client.v2.model.dora_deployment_object_attributes import DORADeploymentObjectAttributes from datadog_api_client.v2.model.dora_deployment_request import DORADeploymentRequest from datadog_api_client.v2.model.dora_deployment_request_attributes import DORADeploymentRequestAttributes from datadog_api_client.v2.model.dora_deployment_request_data import DORADeploymentRequestData from datadog_api_client.v2.model.dora_deployment_response import DORADeploymentResponse from datadog_api_client.v2.model.dora_deployment_response_data import DORADeploymentResponseData from datadog_api_client.v2.model.dora_deployment_type import DORADeploymentType -from datadog_api_client.v2.model.dora_event import DORAEvent +from datadog_api_client.v2.model.dora_deployments_list_response import DORADeploymentsListResponse +from datadog_api_client.v2.model.dora_failure_fetch_response import DORAFailureFetchResponse from datadog_api_client.v2.model.dora_failure_request import DORAFailureRequest from datadog_api_client.v2.model.dora_failure_request_attributes import DORAFailureRequestAttributes from datadog_api_client.v2.model.dora_failure_request_data import DORAFailureRequestData from datadog_api_client.v2.model.dora_failure_response import DORAFailureResponse from datadog_api_client.v2.model.dora_failure_response_data import DORAFailureResponseData from datadog_api_client.v2.model.dora_failure_type import DORAFailureType -from datadog_api_client.v2.model.dora_fetch_response import DORAFetchResponse +from datadog_api_client.v2.model.dora_failures_list_response import DORAFailuresListResponse from datadog_api_client.v2.model.dora_git_info import DORAGitInfo +from datadog_api_client.v2.model.dora_incident_object import DORAIncidentObject +from datadog_api_client.v2.model.dora_incident_object_attributes import DORAIncidentObjectAttributes from datadog_api_client.v2.model.dora_list_deployments_request import DORAListDeploymentsRequest from datadog_api_client.v2.model.dora_list_deployments_request_attributes import DORAListDeploymentsRequestAttributes from datadog_api_client.v2.model.dora_list_deployments_request_data import DORAListDeploymentsRequestData @@ -1328,7 +1334,6 @@ from datadog_api_client.v2.model.dora_list_failures_request_attributes import DORAListFailuresRequestAttributes from datadog_api_client.v2.model.dora_list_failures_request_data import DORAListFailuresRequestData from datadog_api_client.v2.model.dora_list_failures_request_data_type import DORAListFailuresRequestDataType -from datadog_api_client.v2.model.dora_list_response import DORAListResponse from 
datadog_api_client.v2.model.dashboard_list_add_items_request import DashboardListAddItemsRequest from datadog_api_client.v2.model.dashboard_list_add_items_response import DashboardListAddItemsResponse from datadog_api_client.v2.model.dashboard_list_delete_items_request import DashboardListDeleteItemsRequest @@ -2924,6 +2929,16 @@ ObservabilityPipelineAmazonSecurityLakeDestinationType, ) from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth +from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions +from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, +) +from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, +) +from datadog_api_client.v2.model.observability_pipeline_buffer_options_when_full import ( + ObservabilityPipelineBufferOptionsWhenFull, +) from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, @@ -2991,6 +3006,9 @@ from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_type import ( ObservabilityPipelineDedupeProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, +) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) @@ -3126,6 +3144,12 @@ from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( ObservabilityPipelineLogstashSourceType, ) +from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, +) +from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, +) from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( @@ -6095,21 +6119,27 @@ "CustomFrameworkRequirement", "CustomFrameworkType", "CustomFrameworkWithoutRequirements", + "DORADeploymentFetchResponse", + "DORADeploymentObject", + "DORADeploymentObjectAttributes", "DORADeploymentRequest", "DORADeploymentRequestAttributes", "DORADeploymentRequestData", "DORADeploymentResponse", "DORADeploymentResponseData", "DORADeploymentType", - "DORAEvent", + "DORADeploymentsListResponse", + "DORAFailureFetchResponse", "DORAFailureRequest", "DORAFailureRequestAttributes", "DORAFailureRequestData", "DORAFailureResponse", "DORAFailureResponseData", "DORAFailureType", - "DORAFetchResponse", + "DORAFailuresListResponse", "DORAGitInfo", + "DORAIncidentObject", + "DORAIncidentObjectAttributes", "DORAListDeploymentsRequest", "DORAListDeploymentsRequestAttributes", "DORAListDeploymentsRequestData", @@ -6118,7 +6148,6 @@ "DORAListFailuresRequestAttributes", "DORAListFailuresRequestData", "DORAListFailuresRequestDataType", - "DORAListResponse", "DashboardListAddItemsRequest", "DashboardListAddItemsResponse", "DashboardListDeleteItemsRequest", @@ -7380,6 +7409,10 @@ 
"ObservabilityPipelineAmazonSecurityLakeDestination", "ObservabilityPipelineAmazonSecurityLakeDestinationType", "ObservabilityPipelineAwsAuth", + "ObservabilityPipelineBufferOptions", + "ObservabilityPipelineBufferOptionsDiskType", + "ObservabilityPipelineBufferOptionsMemoryType", + "ObservabilityPipelineBufferOptionsWhenFull", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", "ObservabilityPipelineConfigProcessorGroup", @@ -7407,6 +7440,7 @@ "ObservabilityPipelineDedupeProcessor", "ObservabilityPipelineDedupeProcessorMode", "ObservabilityPipelineDedupeProcessorType", + "ObservabilityPipelineDiskBufferOptions", "ObservabilityPipelineElasticsearchDestination", "ObservabilityPipelineElasticsearchDestinationApiVersion", "ObservabilityPipelineElasticsearchDestinationType", @@ -7460,6 +7494,8 @@ "ObservabilityPipelineKafkaSourceType", "ObservabilityPipelineLogstashSource", "ObservabilityPipelineLogstashSourceType", + "ObservabilityPipelineMemoryBufferOptions", + "ObservabilityPipelineMemoryBufferSizeOptions", "ObservabilityPipelineMetadataEntry", "ObservabilityPipelineMetricValue", "ObservabilityPipelineNewRelicDestination", diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen index 73133f1c75..b835c39be3 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:15.575Z \ No newline at end of file +2026-01-06T13:50:00.603Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen index 20165353d5..f99b046bd9 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:16.062Z \ No newline at end of file +2026-01-06T13:50:01.032Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml index 6fc5f16843..7feaccbec7 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"bd8d693c-dc2c-11f0-bf69-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"98cbafb2-eb06-11f0-b183-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -27,7 +27,7 @@ interactions: accept: - '*/*' method: DELETE - uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bd8d693c-dc2c-11f0-bf69-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/98cbafb2-eb06-11f0-b183-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen index 85deda9192..1a1e0e6af8 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.165Z \ No newline at end of file +2026-01-06T13:50:02.108Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen index 201ee9bda8..b0c0cd0458 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.716Z \ No newline at end of file +2026-01-06T13:50:02.599Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml index c18e0ea31d..73ac956af6 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml @@ -13,7 +13,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"be89fea4-dc2c-11f0-bdea-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"99bc2ab4-eb06-11f0-a7ff-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -31,7 +31,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/99bc2ab4-eb06-11f0-a7ff-da7ad0900002 response: body: string: '' @@ -47,7 +47,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/99bc2ab4-eb06-11f0-a7ff-da7ad0900002 response: body: string: '{"errors":[{"title":"Resource Not Found"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen index 2da6d0e5c1..8a7dea973b 100644 --- 
a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:20.018Z \ No newline at end of file +2026-01-06T13:50:04.791Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml index 1d78b7a9d4..60657e4b19 100644 --- a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml @@ -13,7 +13,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9b06af3e-eb06-11f0-a801-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -31,10 +31,10 @@ interactions: accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9b06af3e-eb06-11f0-a801-da7ad0900002 response: body: - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9b06af3e-eb06-11f0-a801-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -52,7 +52,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9b06af3e-eb06-11f0-a801-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen index a1ae2640d7..e5c3681b9a 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.038Z \ No newline at end of file +2026-01-06T13:50:06.623Z \ No newline at end of file diff --git 
a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen index fb745af4ce..1e45abc563 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.507Z \ No newline at end of file +2026-01-06T13:50:07.036Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml index 65aa8d7f7f..753593742e 100644 --- a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml @@ -13,7 +13,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9c5e6732-eb06-11f0-a803-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -34,10 +34,26 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":[{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":[{"id":"4bf478ba-dc68-11f0-87e9-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"a78e416a-de66-11f0-a039-da7ad0900002","type":"pipelines","attributes":{"name":"http-server-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth_strategy":"plain","decoding":"json","id":"http-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/http.crt","key_file":"/etc/ssl/private/http.key"},"type":"http_server"}]}}},{"id":"a84fd58c-de66-11f0-a03b-da7ad0900002","type":"pipelines","attributes":{"name":"amazon_s3-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["s3-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/test-role","external_id":"external-test-id","session_name":"session-test"},"id":"s3-source-1","region":"us-east-1","tls":{"ca_file":"/etc/ssl/certs/s3.ca","crt_file":"/etc/ssl/certs/s3.crt","key_file":"/etc/ssl/private/s3.key"},"type":"amazon_s3"}]}}},{"id":"a42e22e0-df49-11f0-81d5-da7ad0900002","type":"pipelines","attributes":{"name":"dedupe + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["dedupe-group-2"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"2cd3c342-e0c2-11f0-9d34-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-group-1"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_fields"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"20f4849c-e579-11f0-af79-da7ad0900002","type":"pipelines","attributes":{"name":"fluent-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["fluent-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"fluent-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/fluent.crt","key_file":"/etc/ssl/private/fluent.key"},"type":"fluentd"}]}}},{"id":"15621afe-e669-11f0-bec3-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My - Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":1}} + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"dfbeb25a-e6c1-11f0-9bc1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"923fbdb6-e771-11f0-9388-da7ad0900002","type":"pipelines","attributes":{"name":"http-client + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth_strategy":"basic","decoding":"json","id":"http-source-1","scrape_interval_secs":60,"scrape_timeout_secs":10,"tls":{"crt_file":"/path/to/http.crt"},"type":"http_client"}]}}},{"id":"a7b600ce-e771-11f0-939c-da7ad0900002","type":"pipelines","attributes":{"name":"newrelic + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["source-1"],"region":"us","type":"new_relic"}],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"306bab4c-e904-11f0-aa8a-da7ad0900002","type":"pipelines","attributes":{"name":"splunk-hec-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["splunk-hec-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"splunk-hec-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/splunk.crt","key_file":"/etc/ssl/private/splunk.key"},"type":"splunk_hec"}]}}},{"id":"51faefca-e922-11f0-a260-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"8d025dea-ea96-11f0-8a79-da7ad0900002","type":"pipelines","attributes":{"name":"crowdstrike-next-gen-siem-destination-pipeline-basic","config":{"destinations":[{"encoding":"raw_message","id":"crowdstrike-dest-basic-1","inputs":["source-1"],"type":"crowdstrike_next_gen_siem"}],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"ed4d493e-eabf-11f0-852d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"9c5e6732-eb06-11f0-a803-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":15}} ' headers: @@ -52,7 +68,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c162e83e-dc2c-11f0-bf6b-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9c5e6732-eb06-11f0-a803-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen index 92a0e9377f..ef250224cd 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:24.455Z \ No newline at end of file +2026-01-06T13:50:09.046Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml index 390b6216f0..91ba4890ec 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml @@ -13,7 +13,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c28a5ad0-dc2c-11f0-bdee-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9d8eebe0-eb06-11f0-b185-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -34,7 +34,7 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9d8eebe0-eb06-11f0-b185-da7ad0900002 response: body: string: '{"errors":[{"title":"Component with ID my-processor-group is an unknown @@ -56,7 +56,7 @@ interactions: accept: - '*/*' method: DELETE - uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9d8eebe0-eb06-11f0-b185-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen index b083fd34ae..cea63a8776 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.411Z \ No newline at end of file +2026-01-06T13:50:10.811Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen index b9cbd5fcac..e9d2e85c6b 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.929Z \ No newline at end of file +2026-01-06T13:50:11.272Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml index 5e9ce1cb89..ed57843d16 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml @@ -13,7 +13,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9ee260d0-eb06-11f0-b187-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -34,10 +34,10 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9ee260d0-eb06-11f0-b187-da7ad0900002 response: body: - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Updated + string: '{"data":{"id":"9ee260d0-eb06-11f0-b187-da7ad0900002","type":"pipelines","attributes":{"name":"Updated Pipeline 
Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -53,7 +53,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9ee260d0-eb06-11f0-b187-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen index 745856e49a..78ec483340 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.179Z \ No newline at end of file +2026-01-06T13:50:14.036Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen index ecfcc54162..8b15ab7629 100644 --- a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.647Z \ No newline at end of file +2026-01-06T13:50:14.439Z \ No newline at end of file