Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .generated-info
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
"spec_repo_commit": "69305be",
"generated": "2025-08-05 16:21:57.004"
"spec_repo_commit": "884871f",
"generated": "2025-08-05 21:32:37.242"
}
91 changes: 91 additions & 0 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4946,6 +4946,8 @@ components:
description: Optional prefix for blobs written to the container.
example: logs/
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
container_name:
description: The name of the Azure Blob Storage container to store logs
in.
Expand Down Expand Up @@ -24998,6 +25000,8 @@ components:
description: The `microsoft_sentinel` destination forwards logs to Microsoft
Sentinel.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
client_id:
description: Azure AD client ID used for authentication.
example: a1b2c3d4-5678-90ab-cdef-1234567890ab
Expand Down Expand Up @@ -26630,6 +26634,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
Expand Down Expand Up @@ -26708,6 +26714,8 @@ components:
description: S3 bucket name.
example: error-logs
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: amazon-s3-destination
Expand Down Expand Up @@ -26827,6 +26835,28 @@ components:
role session.
type: string
type: object
ObservabilityPipelineBufferOptions:
description: Configuration for buffer settings on destination components.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions'
- $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions'
- $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions'
ObservabilityPipelineBufferOptionsDiskType:
default: disk
description: The type of the buffer that will be configured, a disk buffer.
enum:
- disk
type: string
x-enum-varnames:
- DISK
ObservabilityPipelineBufferOptionsMemoryType:
default: memory
description: The type of the buffer that will be configured, a memory buffer.
enum:
- memory
type: string
x-enum-varnames:
- MEMORY
ObservabilityPipelineConfig:
description: Specifies the pipeline's configuration, including its sources,
processors, and destinations.
Expand Down Expand Up @@ -26982,6 +27012,8 @@ components:
ObservabilityPipelineDatadogLogsDestination:
description: The `datadog_logs` destination forwards logs to Datadog Log Management.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: datadog-logs-destination
Expand Down Expand Up @@ -27083,12 +27115,27 @@ components:
type: string
x-enum-varnames:
- DEDUPE
ObservabilityPipelineDiskBufferOptions:
description: Options for configuring a disk buffer.
properties:
max_size:
description: Maximum size of the disk buffer.
example: 4096
format: int64
maximum: 536870912000
minimum: 268435488
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType'
type: object
ObservabilityPipelineElasticsearchDestination:
description: The `elasticsearch` destination writes logs to an Elasticsearch
cluster.
properties:
api_version:
$ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to in Elasticsearch.
example: logs-index
Expand Down Expand Up @@ -27573,6 +27620,8 @@ components:
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
customer_id:
description: The Google Chronicle customer ID.
example: abcdefg123456789
Expand Down Expand Up @@ -27639,6 +27688,8 @@ components:
description: Name of the GCS bucket.
example: error-logs
type: string
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: Unique identifier for the destination component.
example: gcs-destination
Expand Down Expand Up @@ -27949,6 +28000,32 @@ components:
type: string
x-enum-varnames:
- LOGSTASH
ObservabilityPipelineMemoryBufferOptions:
description: Options for configuring a memory buffer by byte size.
properties:
max_size:
description: Maximum size of the memory buffer.
example: 4096
format: int64
maximum: 13743895347
minimum: 1
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
type: object
ObservabilityPipelineMemoryBufferSizeOptions:
description: Options for configuring a memory buffer by queue length.
properties:
max_events:
description: Maximum number of events to hold in the memory buffer.
example: 500
format: int64
maximum: 268435456
minimum: 1
type: integer
type:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
type: object
ObservabilityPipelineMetadataEntry:
description: A custom metadata entry.
properties:
Expand All @@ -27972,6 +28049,8 @@ components:
ObservabilityPipelineNewRelicDestination:
description: The `new_relic` destination sends logs to the New Relic platform.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: new-relic-destination
Expand Down Expand Up @@ -28109,6 +28188,8 @@ components:
ObservabilityPipelineOpenSearchDestination:
description: The `opensearch` destination writes logs to an OpenSearch cluster.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
bulk_index:
description: The index to write logs to.
example: logs-index
Expand Down Expand Up @@ -28683,6 +28764,8 @@ components:
description: The `rsyslog` destination forwards logs to an external `rsyslog`
server over TCP or UDP using the syslog protocol.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: rsyslog-destination
Expand Down Expand Up @@ -29157,6 +29240,8 @@ components:
ObservabilityPipelineSentinelOneDestination:
description: The `sentinel_one` destination sends logs to SentinelOne.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: sentinelone-destination
Expand Down Expand Up @@ -29241,6 +29326,8 @@ components:
'
example: true
type: boolean
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding'
id:
Expand Down Expand Up @@ -29354,6 +29441,8 @@ components:
ObservabilityPipelineSumoLogicDestination:
description: The `sumo_logic` destination forwards logs to Sumo Logic.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding'
header_custom_fields:
Expand Down Expand Up @@ -29457,6 +29546,8 @@ components:
description: The `syslog_ng` destination forwards logs to an external `syslog-ng`
server over TCP or UDP using the syslog protocol.
properties:
buffer:
$ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
id:
description: The unique identifier for this component.
example: syslog-ng-destination
Expand Down
47 changes: 41 additions & 6 deletions api/datadogV2/model_azure_storage_destination.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ import (
type AzureStorageDestination struct {
// Optional prefix for blobs written to the container.
BlobPrefix *string `json:"blob_prefix,omitempty"`
// Configuration for buffer settings on destination components.
Buffer *ObservabilityPipelineBufferOptions `json:"buffer,omitempty"`
// The name of the Azure Blob Storage container to store logs in.
ContainerName string `json:"container_name"`
// The unique identifier for this component.
Expand Down Expand Up @@ -78,6 +80,34 @@ func (o *AzureStorageDestination) SetBlobPrefix(v string) {
o.BlobPrefix = &v
}

// GetBuffer returns the Buffer field value if set, zero value otherwise.
func (o *AzureStorageDestination) GetBuffer() ObservabilityPipelineBufferOptions {
	if o != nil && o.Buffer != nil {
		return *o.Buffer
	}
	var zero ObservabilityPipelineBufferOptions
	return zero
}

// GetBufferOk returns a tuple with the Buffer field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *AzureStorageDestination) GetBufferOk() (*ObservabilityPipelineBufferOptions, bool) {
	if o == nil {
		return nil, false
	}
	if o.Buffer == nil {
		return nil, false
	}
	return o.Buffer, true
}

// HasBuffer returns a boolean if a field has been set.
func (o *AzureStorageDestination) HasBuffer() bool {
	if o == nil {
		return false
	}
	return o.Buffer != nil
}

// SetBuffer gets a reference to the given ObservabilityPipelineBufferOptions and assigns it to the Buffer field.
func (o *AzureStorageDestination) SetBuffer(v ObservabilityPipelineBufferOptions) {
	// Copy the argument so the stored pointer does not alias caller state.
	buf := v
	o.Buffer = &buf
}

// GetContainerName returns the ContainerName field value.
func (o *AzureStorageDestination) GetContainerName() string {
if o == nil {
Expand Down Expand Up @@ -179,6 +209,9 @@ func (o AzureStorageDestination) MarshalJSON() ([]byte, error) {
if o.BlobPrefix != nil {
toSerialize["blob_prefix"] = o.BlobPrefix
}
if o.Buffer != nil {
toSerialize["buffer"] = o.Buffer
}
toSerialize["container_name"] = o.ContainerName
toSerialize["id"] = o.Id
toSerialize["inputs"] = o.Inputs
Expand All @@ -193,11 +226,12 @@ func (o AzureStorageDestination) MarshalJSON() ([]byte, error) {
// UnmarshalJSON deserializes the given payload.
func (o *AzureStorageDestination) UnmarshalJSON(bytes []byte) (err error) {
all := struct {
BlobPrefix *string `json:"blob_prefix,omitempty"`
ContainerName *string `json:"container_name"`
Id *string `json:"id"`
Inputs *[]string `json:"inputs"`
Type *AzureStorageDestinationType `json:"type"`
BlobPrefix *string `json:"blob_prefix,omitempty"`
Buffer *ObservabilityPipelineBufferOptions `json:"buffer,omitempty"`
ContainerName *string `json:"container_name"`
Id *string `json:"id"`
Inputs *[]string `json:"inputs"`
Type *AzureStorageDestinationType `json:"type"`
}{}
if err = datadog.Unmarshal(bytes, &all); err != nil {
return datadog.Unmarshal(bytes, &o.UnparsedObject)
Expand All @@ -216,13 +250,14 @@ func (o *AzureStorageDestination) UnmarshalJSON(bytes []byte) (err error) {
}
additionalProperties := make(map[string]interface{})
if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
datadog.DeleteKeys(additionalProperties, &[]string{"blob_prefix", "container_name", "id", "inputs", "type"})
datadog.DeleteKeys(additionalProperties, &[]string{"blob_prefix", "buffer", "container_name", "id", "inputs", "type"})
} else {
return err
}

hasInvalidField := false
o.BlobPrefix = all.BlobPrefix
o.Buffer = all.Buffer
o.ContainerName = *all.ContainerName
o.Id = *all.Id
o.Inputs = *all.Inputs
Expand Down
Loading
Loading