@@ -9,7 +9,7 @@ use tracing::{debug, error, warn};
 use zstd::stream::write::Encoder;
 
 use crate::aggregator::AggregatorHandle;
-use crate::config::{FlusherMode, LogFlusherConfig};
+use crate::config::{Destination, LogFlusherConfig};
 use crate::errors::FlushError;
 
 /// Maximum number of send attempts before giving up on a batch.
@@ -142,11 +142,11 @@ impl LogFlusher {
 
     fn resolve_endpoint(&self) -> (String, bool) {
         match &self.config.mode {
-            FlusherMode::Datadog => {
+            Destination::Datadog => {
                 let url = format!("https://http-intake.logs.{}/api/v2/logs", self.config.site);
                 (url, self.config.use_compression)
             }
-            FlusherMode::ObservabilityPipelinesWorker { url } => {
+            Destination::ObservabilityPipelinesWorker { url } => {
                 // OPW does not support compression
                 (url.clone(), false)
             }
@@ -179,7 +179,7 @@ impl LogFlusher {
             .header("DD-API-KEY", api_key)
             .header("Content-Type", "application/json");
 
-        if matches!(self.config.mode, FlusherMode::Datadog) {
+        if matches!(self.config.mode, Destination::Datadog) {
             req = req.header("DD-PROTOCOL", "agent-json");
         }
 
@@ -284,7 +284,7 @@ fn compress_zstd(data: &[u8], level: i32) -> Result<Vec<u8>, FlushError> {
 mod tests {
     use super::*;
     use crate::aggregator::AggregatorService;
-    use crate::config::{FlusherMode, LogFlusherConfig};
+    use crate::config::{Destination, LogFlusherConfig};
     use crate::intake_entry::IntakeEntry;
     use crate::logs_additional_endpoint::LogsAdditionalEndpoint;
     use mockito::Matcher;
@@ -299,7 +299,7 @@ mod tests {
         LogFlusherConfig {
             api_key: "test-api-key".to_string(),
             site: "datadoghq.com".to_string(),
-            mode: FlusherMode::ObservabilityPipelinesWorker {
+            mode: Destination::ObservabilityPipelinesWorker {
                 url: format!("{mock_url}/api/v2/logs"),
             },
             additional_endpoints: Vec::new(),
@@ -348,7 +348,7 @@ mod tests {
         let config = LogFlusherConfig {
             api_key: "test-api-key".to_string(),
             site: "datadoghq.com".to_string(),
-            mode: FlusherMode::Datadog,
+            mode: Destination::Datadog,
             additional_endpoints: Vec::new(),
             use_compression: false,
             compression_level: 3,
@@ -391,7 +391,7 @@ mod tests {
         let config = LogFlusherConfig {
             api_key: "test-api-key".to_string(),
             site: "unused".to_string(),
-            mode: FlusherMode::ObservabilityPipelinesWorker { url: opw_url },
+            mode: Destination::ObservabilityPipelinesWorker { url: opw_url },
             additional_endpoints: Vec::new(),
             use_compression: false,
             compression_level: 3,
@@ -535,7 +535,7 @@ mod tests {
         let config = LogFlusherConfig {
             api_key: "key".to_string(),
             site: "datadoghq.com".to_string(),
-            mode: FlusherMode::ObservabilityPipelinesWorker {
+            mode: Destination::ObservabilityPipelinesWorker {
                 url: format!("{}/api/v2/logs", primary.url()),
             },
             additional_endpoints: vec![
@@ -610,7 +610,7 @@ mod tests {
         let config = LogFlusherConfig {
             api_key: "key".to_string(),
             site: "datadoghq.com".to_string(),
-            mode: FlusherMode::ObservabilityPipelinesWorker {
+            mode: Destination::ObservabilityPipelinesWorker {
                 url: format!("{}/api/v2/logs", primary.url()),
             },
             additional_endpoints: vec![
@@ -712,7 +712,7 @@ mod tests {
         let config = LogFlusherConfig {
             api_key: "key".to_string(),
             site: "datadoghq.com".to_string(),
-            mode: FlusherMode::ObservabilityPipelinesWorker {
+            mode: Destination::ObservabilityPipelinesWorker {
                 url: format!("{}/api/v2/logs", primary.url()),
             },
             additional_endpoints: vec![LogsAdditionalEndpoint {
0 commit comments