Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .generated-info
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
"spec_repo_commit": "4413e63",
"generated": "2025-08-19 20:28:34.170"
"spec_repo_commit": "ca16233",
"generated": "2025-08-21 17:08:30.242"
}
110 changes: 105 additions & 5 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4983,6 +4983,8 @@ components:
description: Optional prefix for blobs written to the container.
example: logs/
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
container_name:
description: The name of the Azure Blob Storage container to store logs
in.
Expand Down Expand Up @@ -25037,6 +25039,8 @@ components:
description: The `microsoft_sentinel` destination forwards logs to Microsoft
Sentinel.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
client_id:
description: Azure AD client ID used for authentication.
example: a1b2c3d4-5678-90ab-cdef-1234567890ab
Expand Down Expand Up @@ -26669,6 +26673,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
Expand Down Expand Up @@ -26747,6 +26753,8 @@ components:
description: S3 bucket name.
example: error-logs
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: amazon-s3-destination
Expand Down Expand Up @@ -26919,6 +26927,30 @@ components:
role session.
type: string
type: object
ObservabilityPipelineBufferOptions:
description: Configuration for buffer settings on destination components.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions'
- $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions'
- $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions'
ObservabilityPipelineBufferOptionsDiskType:
default: disk
description: Specifies the buffer type to configure. This option supports only
a disk buffer.
enum:
- disk
type: string
x-enum-varnames:
- DISK
ObservabilityPipelineBufferOptionsMemoryType:
default: memory
description: Specifies the buffer type to configure. This option supports only
a memory buffer.
enum:
- memory
type: string
x-enum-varnames:
- MEMORY
ObservabilityPipelineConfig:
description: Specifies the pipeline's configuration, including its sources,
processors, and destinations.
Expand Down Expand Up @@ -27232,6 +27264,8 @@ components:
ObservabilityPipelineDatadogLogsDestination:
description: The `datadog_logs` destination forwards logs to Datadog Log Management.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: datadog-logs-destination
Expand Down Expand Up @@ -27407,12 +27441,25 @@ components:
type: string
x-enum-varnames:
- DEDUPE
ObservabilityPipelineDiskBufferOptions:
description: Options for configuring a disk buffer.
properties:
max_size:
description: Maximum size of the disk buffer.
example: 4096
format: int64
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType'
type: object
ObservabilityPipelineElasticsearchDestination:
description: The `elasticsearch` destination writes logs to an Elasticsearch
cluster.
properties:
api_version:
$ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to in Elasticsearch.
example: logs-index
Expand Down Expand Up @@ -27897,6 +27944,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
customer_id:
description: The Google Chronicle customer ID.
example: abcdefg123456789
Expand Down Expand Up @@ -27963,6 +28012,8 @@ components:
description: Name of the GCS bucket.
example: error-logs
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: gcs-destination
Expand Down Expand Up @@ -28273,6 +28324,28 @@ components:
type: string
x-enum-varnames:
- LOGSTASH
ObservabilityPipelineMemoryBufferOptions:
description: Options for configuring a memory buffer by byte size.
properties:
max_size:
        description: Maximum size of the memory buffer.
example: 4096
format: int64
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
type: object
ObservabilityPipelineMemoryBufferSizeOptions:
description: Options for configuring a memory buffer by queue length.
properties:
max_events:
description: Maximum events for the memory buffer.
example: 500
format: int64
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
type: object
ObservabilityPipelineMetadataEntry:
description: A custom metadata entry.
properties:
Expand All @@ -28296,6 +28369,8 @@ components:
ObservabilityPipelineNewRelicDestination:
description: The `new_relic` destination sends logs to the New Relic platform.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: new-relic-destination
Expand Down Expand Up @@ -28433,6 +28508,8 @@ components:
ObservabilityPipelineOpenSearchDestination:
description: The `opensearch` destination writes logs to an OpenSearch cluster.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
Expand Down Expand Up @@ -28657,9 +28734,10 @@ components:
can drop or alert.
properties:
drop_events:
description: If set to `true`, logs that matched the quota filter and sent
after the quota has been met are dropped; only logs that did not match
the filter query continue through the pipeline.
description: 'If set to `true`, logs that match the quota filter and are
sent after the quota is exceeded are dropped. Logs that do not match the
filter continue through the pipeline. **Note**: You can set either `drop_events`
or `overflow_action`, but not both.'
example: false
type: boolean
id:
Expand Down Expand Up @@ -28707,14 +28785,15 @@ components:
items:
type: string
type: array
too_many_buckets_action:
$ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction'
type:
$ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType'
required:
- id
- type
- include
- name
- drop_events
- limit
- inputs
type: object
Expand Down Expand Up @@ -28745,7 +28824,8 @@ components:
- BYTES
- EVENTS
ObservabilityPipelineQuotaProcessorOverflowAction:
description: 'The action to take when the quota is exceeded. Options:
description: 'The action to take when the quota or bucket limit is exceeded.
Options:

- `drop`: Drop the event.

Expand Down Expand Up @@ -29006,6 +29086,8 @@ components:
description: The `rsyslog` destination forwards logs to an external `rsyslog`
server over TCP or UDP using the syslog protocol.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: rsyslog-destination
Expand Down Expand Up @@ -29076,6 +29158,16 @@ components:
description: The `sample` processor allows probabilistic sampling of logs at
a fixed rate.
properties:
group_by:
description: Optional list of fields to group events by. Each group is sampled
independently.
example:
- service
- host
items:
type: string
minItems: 1
type: array
id:
description: The unique identifier for this component. Used to reference
this component in other parts of the pipeline (for example, as the `input`
Expand Down Expand Up @@ -29480,6 +29572,8 @@ components:
ObservabilityPipelineSentinelOneDestination:
description: The `sentinel_one` destination sends logs to SentinelOne.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: sentinelone-destination
Expand Down Expand Up @@ -29836,6 +29930,8 @@ components:
'
example: true
type: boolean
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding'
id:
Expand Down Expand Up @@ -29949,6 +30045,8 @@ components:
ObservabilityPipelineSumoLogicDestination:
description: The `sumo_logic` destination forwards logs to Sumo Logic.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding'
header_custom_fields:
Expand Down Expand Up @@ -30052,6 +30150,8 @@ components:
description: The `syslog_ng` destination forwards logs to an external `syslog-ng`
server over TCP or UDP using the syslog protocol.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: syslog-ng-destination
Expand Down
6 changes: 6 additions & 0 deletions lib/datadog_api_client/inflector.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2623,6 +2623,9 @@ def overrides
"v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination",
"v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType",
"v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth",
"v2.observability_pipeline_buffer_options" => "ObservabilityPipelineBufferOptions",
"v2.observability_pipeline_buffer_options_disk_type" => "ObservabilityPipelineBufferOptionsDiskType",
"v2.observability_pipeline_buffer_options_memory_type" => "ObservabilityPipelineBufferOptionsMemoryType",
"v2.observability_pipeline_config" => "ObservabilityPipelineConfig",
"v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem",
"v2.observability_pipeline_config_processor_item" => "ObservabilityPipelineConfigProcessorItem",
Expand All @@ -2649,6 +2652,7 @@ def overrides
"v2.observability_pipeline_dedupe_processor" => "ObservabilityPipelineDedupeProcessor",
"v2.observability_pipeline_dedupe_processor_mode" => "ObservabilityPipelineDedupeProcessorMode",
"v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType",
"v2.observability_pipeline_disk_buffer_options" => "ObservabilityPipelineDiskBufferOptions",
"v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination",
"v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion",
"v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType",
Expand Down Expand Up @@ -2699,6 +2703,8 @@ def overrides
"v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType",
"v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource",
"v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType",
"v2.observability_pipeline_memory_buffer_options" => "ObservabilityPipelineMemoryBufferOptions",
"v2.observability_pipeline_memory_buffer_size_options" => "ObservabilityPipelineMemoryBufferSizeOptions",
"v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry",
"v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue",
"v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination",
Expand Down
12 changes: 11 additions & 1 deletion lib/datadog_api_client/v2/models/azure_storage_destination.rb
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ class AzureStorageDestination
# Optional prefix for blobs written to the container.
attr_accessor :blob_prefix

# Configuration for buffer settings on destination components.
attr_accessor :buffer

# The name of the Azure Blob Storage container to store logs in.
attr_reader :container_name

Expand All @@ -43,6 +46,7 @@ class AzureStorageDestination
def self.attribute_map
{
:'blob_prefix' => :'blob_prefix',
:'buffer' => :'buffer',
:'container_name' => :'container_name',
:'id' => :'id',
:'inputs' => :'inputs',
Expand All @@ -55,6 +59,7 @@ def self.attribute_map
def self.openapi_types
{
:'blob_prefix' => :'String',
:'buffer' => :'ObservabilityPipelineBufferOptions',
:'container_name' => :'String',
:'id' => :'String',
:'inputs' => :'Array<String>',
Expand Down Expand Up @@ -84,6 +89,10 @@ def initialize(attributes = {})
self.blob_prefix = attributes[:'blob_prefix']
end

if attributes.key?(:'buffer')
self.buffer = attributes[:'buffer']
end

if attributes.key?(:'container_name')
self.container_name = attributes[:'container_name']
end
Expand Down Expand Up @@ -181,6 +190,7 @@ def ==(o)
return true if self.equal?(o)
self.class == o.class &&
blob_prefix == o.blob_prefix &&
buffer == o.buffer &&
container_name == o.container_name &&
id == o.id &&
inputs == o.inputs &&
Expand All @@ -192,7 +202,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[blob_prefix, container_name, id, inputs, type, additional_properties].hash
[blob_prefix, buffer, container_name, id, inputs, type, additional_properties].hash
end
end
end
Loading
Loading