Skip to content

Commit 445909d

Browse files
refactor(log-agent): rename FlusherMode → Destination; add #[must_use] to from_env
- FlusherMode renamed to Destination — it describes where logs are sent, not how.
- #[must_use] added to LogFlusherConfig::from_env() to catch ignored return values.

Rationale: reviewer feedback — the FlusherMode name is misleading; Destination is more accurate.

This commit made by [/dd:git:commit:quick](https://github.com/DataDog/claude-marketplace/tree/main/dd/commands/git/commit/quick.md)
1 parent 6036707 commit 445909d

7 files changed

Lines changed: 51 additions & 48 deletions

File tree

Cargo.lock

Lines changed: 20 additions & 20 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/datadog-log-agent/examples/send_logs.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,9 @@
4242
//! | DD_OBSERVABILITY_PIPELINES_WORKER_LOGS_URL | (empty) |
4343
//! | LOG_ENTRY_COUNT | 5 |
4444
45-
use datadog_log_agent::{AggregatorService, FlusherMode, IntakeEntry, LogFlusher, LogFlusherConfig};
45+
use datadog_log_agent::{
46+
AggregatorService, Destination, IntakeEntry, LogFlusher, LogFlusherConfig,
47+
};
4648

4749
#[allow(clippy::disallowed_methods)] // plain reqwest::Client for local testing
4850
#[tokio::main]
@@ -175,11 +177,11 @@ fn now_ms() -> i64 {
175177

176178
fn describe_config(config: &LogFlusherConfig) -> (String, bool) {
177179
match &config.mode {
178-
FlusherMode::Datadog => (
180+
Destination::Datadog => (
179181
format!("https://http-intake.logs.{}/api/v2/logs", config.site),
180182
config.use_compression,
181183
),
182-
FlusherMode::ObservabilityPipelinesWorker { url } => (url.clone(), false),
184+
Destination::ObservabilityPipelinesWorker { url } => (url.clone(), false),
183185
}
184186
}
185187

crates/datadog-log-agent/src/config.rs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ use crate::logs_additional_endpoint::{parse_additional_endpoints, LogsAdditional
88

99
/// Controls where and how logs are shipped.
1010
#[derive(Debug, Clone)]
11-
pub enum FlusherMode {
11+
pub enum Destination {
1212
/// Ship to Datadog Logs API.
1313
/// Endpoint: `https://http-intake.logs.{site}/api/v2/logs`
1414
/// Headers: `DD-API-KEY`, `DD-PROTOCOL: agent-json`, optionally `Content-Encoding: zstd`
@@ -30,7 +30,7 @@ pub struct LogFlusherConfig {
3030
pub site: String,
3131

3232
/// Flusher mode — Datadog vs Observability Pipelines Worker.
33-
pub mode: FlusherMode,
33+
pub mode: Destination,
3434

3535
/// Additional Datadog intake endpoints to ship each batch to in parallel.
3636
/// Each endpoint uses its own API key and full intake URL.
@@ -58,6 +58,7 @@ impl LogFlusherConfig {
5858
/// | `DD_FLUSH_TIMEOUT` | `5` (seconds) |
5959
/// | `DD_OBSERVABILITY_PIPELINES_WORKER_LOGS_ENABLED` | `false` |
6060
/// | `DD_OBSERVABILITY_PIPELINES_WORKER_LOGS_URL` | (none) |
61+
#[must_use]
6162
pub fn from_env() -> Self {
6263
let api_key = std::env::var("DD_API_KEY").unwrap_or_default();
6364
let site = std::env::var("DD_SITE").unwrap_or_else(|_| DEFAULT_SITE.to_string());
@@ -86,9 +87,9 @@ impl LogFlusherConfig {
8687
if url.is_empty() {
8788
tracing::warn!("OPW mode enabled but DD_OBSERVABILITY_PIPELINES_WORKER_LOGS_URL is not set — log flush will fail");
8889
}
89-
FlusherMode::ObservabilityPipelinesWorker { url }
90+
Destination::ObservabilityPipelinesWorker { url }
9091
} else {
91-
FlusherMode::Datadog
92+
Destination::Datadog
9293
};
9394

9495
Self {

crates/datadog-log-agent/src/flusher.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ use tracing::{debug, error, warn};
99
use zstd::stream::write::Encoder;
1010

1111
use crate::aggregator::AggregatorHandle;
12-
use crate::config::{FlusherMode, LogFlusherConfig};
12+
use crate::config::{Destination, LogFlusherConfig};
1313
use crate::errors::FlushError;
1414

1515
/// Maximum number of send attempts before giving up on a batch.
@@ -142,11 +142,11 @@ impl LogFlusher {
142142

143143
fn resolve_endpoint(&self) -> (String, bool) {
144144
match &self.config.mode {
145-
FlusherMode::Datadog => {
145+
Destination::Datadog => {
146146
let url = format!("https://http-intake.logs.{}/api/v2/logs", self.config.site);
147147
(url, self.config.use_compression)
148148
}
149-
FlusherMode::ObservabilityPipelinesWorker { url } => {
149+
Destination::ObservabilityPipelinesWorker { url } => {
150150
// OPW does not support compression
151151
(url.clone(), false)
152152
}
@@ -179,7 +179,7 @@ impl LogFlusher {
179179
.header("DD-API-KEY", api_key)
180180
.header("Content-Type", "application/json");
181181

182-
if matches!(self.config.mode, FlusherMode::Datadog) {
182+
if matches!(self.config.mode, Destination::Datadog) {
183183
req = req.header("DD-PROTOCOL", "agent-json");
184184
}
185185

@@ -284,7 +284,7 @@ fn compress_zstd(data: &[u8], level: i32) -> Result<Vec<u8>, FlushError> {
284284
mod tests {
285285
use super::*;
286286
use crate::aggregator::AggregatorService;
287-
use crate::config::{FlusherMode, LogFlusherConfig};
287+
use crate::config::{Destination, LogFlusherConfig};
288288
use crate::intake_entry::IntakeEntry;
289289
use crate::logs_additional_endpoint::LogsAdditionalEndpoint;
290290
use mockito::Matcher;
@@ -299,7 +299,7 @@ mod tests {
299299
LogFlusherConfig {
300300
api_key: "test-api-key".to_string(),
301301
site: "datadoghq.com".to_string(),
302-
mode: FlusherMode::ObservabilityPipelinesWorker {
302+
mode: Destination::ObservabilityPipelinesWorker {
303303
url: format!("{mock_url}/api/v2/logs"),
304304
},
305305
additional_endpoints: Vec::new(),
@@ -348,7 +348,7 @@ mod tests {
348348
let config = LogFlusherConfig {
349349
api_key: "test-api-key".to_string(),
350350
site: "datadoghq.com".to_string(),
351-
mode: FlusherMode::Datadog,
351+
mode: Destination::Datadog,
352352
additional_endpoints: Vec::new(),
353353
use_compression: false,
354354
compression_level: 3,
@@ -391,7 +391,7 @@ mod tests {
391391
let config = LogFlusherConfig {
392392
api_key: "test-api-key".to_string(),
393393
site: "unused".to_string(),
394-
mode: FlusherMode::ObservabilityPipelinesWorker { url: opw_url },
394+
mode: Destination::ObservabilityPipelinesWorker { url: opw_url },
395395
additional_endpoints: Vec::new(),
396396
use_compression: false,
397397
compression_level: 3,
@@ -535,7 +535,7 @@ mod tests {
535535
let config = LogFlusherConfig {
536536
api_key: "key".to_string(),
537537
site: "datadoghq.com".to_string(),
538-
mode: FlusherMode::ObservabilityPipelinesWorker {
538+
mode: Destination::ObservabilityPipelinesWorker {
539539
url: format!("{}/api/v2/logs", primary.url()),
540540
},
541541
additional_endpoints: vec![
@@ -610,7 +610,7 @@ mod tests {
610610
let config = LogFlusherConfig {
611611
api_key: "key".to_string(),
612612
site: "datadoghq.com".to_string(),
613-
mode: FlusherMode::ObservabilityPipelinesWorker {
613+
mode: Destination::ObservabilityPipelinesWorker {
614614
url: format!("{}/api/v2/logs", primary.url()),
615615
},
616616
additional_endpoints: vec![
@@ -712,7 +712,7 @@ mod tests {
712712
let config = LogFlusherConfig {
713713
api_key: "key".to_string(),
714714
site: "datadoghq.com".to_string(),
715-
mode: FlusherMode::ObservabilityPipelinesWorker {
715+
mode: Destination::ObservabilityPipelinesWorker {
716716
url: format!("{}/api/v2/logs", primary.url()),
717717
},
718718
additional_endpoints: vec![LogsAdditionalEndpoint {

crates/datadog-log-agent/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ pub mod server;
1919

2020
// Re-export the most commonly used types at the crate root
2121
pub use aggregator::{AggregatorHandle, AggregatorService};
22-
pub use config::{FlusherMode, LogFlusherConfig};
22+
pub use config::{Destination, LogFlusherConfig};
2323
pub use flusher::LogFlusher;
2424
pub use intake_entry::IntakeEntry;
2525
pub use logs_additional_endpoint::LogsAdditionalEndpoint;

crates/datadog-log-agent/tests/integration_test.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,13 @@
1212
//! HTTP POST → `LogServer` → `AggregatorHandle::insert_batch` → `LogFlusher::flush` → HTTP endpoint
1313
//!
1414
//! HTTP traffic is directed to a local `mockito` server via
15-
//! `FlusherMode::ObservabilityPipelinesWorker`, which accepts a direct URL.
15+
//! `Destination::ObservabilityPipelinesWorker`, which accepts a direct URL.
1616
//! Datadog-mode-specific headers (`DD-PROTOCOL`) are covered by unit tests in `flusher.rs`.
1717
1818
#![allow(clippy::disallowed_methods, clippy::unwrap_used, clippy::expect_used)]
1919

2020
use datadog_log_agent::{
21-
AggregatorService, FlusherMode, IntakeEntry, LogFlusher, LogFlusherConfig, LogServer,
21+
AggregatorService, Destination, IntakeEntry, LogFlusher, LogFlusherConfig, LogServer,
2222
LogServerConfig, LogsAdditionalEndpoint,
2323
};
2424
use mockito::{Matcher, Server};
@@ -38,7 +38,7 @@ fn opw_config(mock_url: &str) -> LogFlusherConfig {
3838
LogFlusherConfig {
3939
api_key: "test-api-key".to_string(),
4040
site: "ignored.datadoghq.com".to_string(),
41-
mode: FlusherMode::ObservabilityPipelinesWorker {
41+
mode: Destination::ObservabilityPipelinesWorker {
4242
url: format!("{}/logs", mock_url),
4343
},
4444
additional_endpoints: Vec::new(),
@@ -415,7 +415,7 @@ async fn test_opw_mode_uses_custom_url_and_omits_dd_protocol() {
415415
let config = LogFlusherConfig {
416416
api_key: "test-api-key".to_string(),
417417
site: "ignored".to_string(),
418-
mode: FlusherMode::ObservabilityPipelinesWorker {
418+
mode: Destination::ObservabilityPipelinesWorker {
419419
url: format!("{}{}", server.url(), opw_path),
420420
},
421421
additional_endpoints: Vec::new(),
@@ -461,7 +461,7 @@ async fn test_opw_mode_disables_compression_regardless_of_config() {
461461
let config = LogFlusherConfig {
462462
api_key: "key".to_string(),
463463
site: "ignored".to_string(),
464-
mode: FlusherMode::ObservabilityPipelinesWorker {
464+
mode: Destination::ObservabilityPipelinesWorker {
465465
url: format!("{}/logs", server.url()),
466466
},
467467
additional_endpoints: Vec::new(),
@@ -600,7 +600,7 @@ async fn test_additional_endpoints_receive_same_batch() {
600600
let config = LogFlusherConfig {
601601
api_key: "test-api-key".to_string(),
602602
site: "ignored".to_string(),
603-
mode: FlusherMode::ObservabilityPipelinesWorker {
603+
mode: Destination::ObservabilityPipelinesWorker {
604604
url: format!("{}/logs", primary.url()),
605605
},
606606
additional_endpoints: vec![LogsAdditionalEndpoint {
@@ -648,7 +648,7 @@ async fn test_additional_endpoint_failure_does_not_affect_return_value() {
648648
let config = LogFlusherConfig {
649649
api_key: "key".to_string(),
650650
site: "ignored".to_string(),
651-
mode: FlusherMode::ObservabilityPipelinesWorker {
651+
mode: Destination::ObservabilityPipelinesWorker {
652652
url: format!("{}/logs", primary.url()),
653653
},
654654
additional_endpoints: vec![LogsAdditionalEndpoint {

crates/datadog-serverless-compat/src/main.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ use libdd_trace_utils::{config_utils::read_cloud_env, trace_utils::EnvironmentTy
2828
use datadog_fips::reqwest_adapter::create_reqwest_client_builder;
2929
use datadog_log_agent::{
3030
AggregatorHandle as LogAggregatorHandle, AggregatorService as LogAggregatorService,
31-
FlusherMode as LogFlusherMode, LogFlusher, LogFlusherConfig, LogServer, LogServerConfig,
31+
Destination as LogDestination, LogFlusher, LogFlusherConfig, LogServer, LogServerConfig,
3232
};
3333
use dogstatsd::{
3434
aggregator::{AggregatorHandle, AggregatorService},
@@ -405,7 +405,7 @@ fn start_log_agent(
405405
};
406406

407407
// Fail fast: OPW mode with an empty URL will always produce a network error at flush time.
408-
if let LogFlusherMode::ObservabilityPipelinesWorker { url } = &config.mode {
408+
if let LogDestination::ObservabilityPipelinesWorker { url } = &config.mode {
409409
if url.is_empty() {
410410
error!("OPW mode enabled but DD_OBSERVABILITY_PIPELINES_WORKER_LOGS_URL is empty — log agent disabled");
411411
return None;

0 commit comments

Comments (0)