diff --git a/CHANGELOG.md b/CHANGELOG.md index dac90bf15f..cb8ae6f90f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased +- `opentelemetry-sdk`: Add `create_meter_provider`/`configure_meter_provider` to declarative file configuration, enabling MeterProvider instantiation from config files without reading env vars + ([#4987](https://github.com/open-telemetry/opentelemetry-python/pull/4987)) +- `opentelemetry-sdk`: Add `create_resource` and `create_propagator`/`configure_propagator` to declarative file configuration, enabling Resource and propagator instantiation from config files without reading env vars + ([#4979](https://github.com/open-telemetry/opentelemetry-python/pull/4979)) - `opentelemetry-sdk`: Add file configuration support with YAML/JSON loading, environment variable substitution, and schema validation against the vendored OTel config JSON schema ([#4898](https://github.com/open-telemetry/opentelemetry-python/pull/4898)) - Fix intermittent CI failures in `getting-started` and `tracecontext` jobs caused by GitHub git CDN SHA propagation lag by installing contrib packages from the already-checked-out local copy instead of a second git clone diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_exceptions.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_exceptions.py new file mode 100644 index 0000000000..9b90dbd50a --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_exceptions.py @@ -0,0 +1,25 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class ConfigurationError(Exception): + """Raised when configuration loading, parsing, validation, or instantiation fails. + + This includes errors from: + - File not found or inaccessible + - Invalid YAML/JSON syntax + - Schema validation failures + - Environment variable substitution errors + - Missing required SDK extensions (e.g., propagator packages not installed) + """ diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_meter_provider.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_meter_provider.py new file mode 100644 index 0000000000..317e9e4889 --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_meter_provider.py @@ -0,0 +1,512 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import logging +from typing import Dict, List, Optional, Set, Type + +from opentelemetry import metrics +from opentelemetry.sdk._configuration._exceptions import ConfigurationError +from opentelemetry.sdk._configuration.models import ( + Aggregation as AggregationConfig, +) +from opentelemetry.sdk._configuration.models import ( + ConsoleMetricExporter as ConsoleMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + ExemplarFilter as ExemplarFilterConfig, +) +from opentelemetry.sdk._configuration.models import ( + ExporterDefaultHistogramAggregation, + ExporterTemporalityPreference, + InstrumentType, +) +from opentelemetry.sdk._configuration.models import ( + MeterProvider as MeterProviderConfig, +) +from opentelemetry.sdk._configuration.models import ( + MetricReader as MetricReaderConfig, +) +from opentelemetry.sdk._configuration.models import ( + OtlpGrpcMetricExporter as OtlpGrpcMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + OtlpHttpMetricExporter as OtlpHttpMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + PeriodicMetricReader as PeriodicMetricReaderConfig, +) +from opentelemetry.sdk._configuration.models import ( + PushMetricExporter as PushMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + View as ViewConfig, +) +from opentelemetry.sdk.metrics import ( + AlwaysOffExemplarFilter, + AlwaysOnExemplarFilter, + Counter, + Histogram, + MeterProvider, + ObservableCounter, + ObservableGauge, + ObservableUpDownCounter, + TraceBasedExemplarFilter, + UpDownCounter, + _Gauge, +) +from opentelemetry.sdk.metrics.export import ( + AggregationTemporality, + ConsoleMetricExporter, + MetricExporter, + MetricReader, + PeriodicExportingMetricReader, +) +from opentelemetry.sdk.metrics.view import ( + Aggregation, + DefaultAggregation, + DropAggregation, + ExplicitBucketHistogramAggregation, + 
ExponentialBucketHistogramAggregation, + LastValueAggregation, + SumAggregation, + View, +) +from opentelemetry.sdk.resources import Resource + +_logger = logging.getLogger(__name__) + + +def _parse_headers( + headers: Optional[list], + headers_list: Optional[str], +) -> Optional[Dict[str, str]]: + """Merge headers struct and headers_list into a dict. + + Returns None if neither is set, letting the exporter read env vars. + headers struct takes priority over headers_list for the same key. + """ + if headers is None and headers_list is None: + return None + result: Dict[str, str] = {} + if headers_list: + for item in headers_list.split(","): + item = item.strip() + if "=" in item: + key, value = item.split("=", 1) + result[key.strip()] = value.strip() + elif item: + _logger.warning( + "Invalid header pair in headers_list (missing '='): %s", + item, + ) + if headers: + for pair in headers: + result[pair.name] = pair.value or "" + return result + + +# Default interval/timeout per OTel spec (milliseconds). +_DEFAULT_EXPORT_INTERVAL_MILLIS = 60000 +_DEFAULT_EXPORT_TIMEOUT_MILLIS = 30000 + +# Instrument type → SDK instrument class mapping (for View selectors). +_INSTRUMENT_TYPE_MAP: Dict[InstrumentType, Type] = { + InstrumentType.counter: Counter, + InstrumentType.up_down_counter: UpDownCounter, + InstrumentType.histogram: Histogram, + InstrumentType.gauge: _Gauge, + InstrumentType.observable_counter: ObservableCounter, + InstrumentType.observable_gauge: ObservableGauge, + InstrumentType.observable_up_down_counter: ObservableUpDownCounter, +} + + +def _map_temporality( + pref: Optional[ExporterTemporalityPreference], +) -> Dict[type, AggregationTemporality]: + """Map a temporality preference to an explicit preferred_temporality dict. + + Always returns an explicit dict to suppress OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE. + Default (None or cumulative) → all instruments CUMULATIVE. 
+ """ + if pref is None or pref == ExporterTemporalityPreference.cumulative: + return { + Counter: AggregationTemporality.CUMULATIVE, + UpDownCounter: AggregationTemporality.CUMULATIVE, + Histogram: AggregationTemporality.CUMULATIVE, + ObservableCounter: AggregationTemporality.CUMULATIVE, + ObservableUpDownCounter: AggregationTemporality.CUMULATIVE, + ObservableGauge: AggregationTemporality.CUMULATIVE, + } + if pref == ExporterTemporalityPreference.delta: + return { + Counter: AggregationTemporality.DELTA, + UpDownCounter: AggregationTemporality.CUMULATIVE, + Histogram: AggregationTemporality.DELTA, + ObservableCounter: AggregationTemporality.DELTA, + ObservableUpDownCounter: AggregationTemporality.CUMULATIVE, + ObservableGauge: AggregationTemporality.CUMULATIVE, + } + if pref == ExporterTemporalityPreference.low_memory: + return { + Counter: AggregationTemporality.DELTA, + UpDownCounter: AggregationTemporality.CUMULATIVE, + Histogram: AggregationTemporality.DELTA, + ObservableCounter: AggregationTemporality.CUMULATIVE, + ObservableUpDownCounter: AggregationTemporality.CUMULATIVE, + ObservableGauge: AggregationTemporality.CUMULATIVE, + } + raise ConfigurationError( + f"Unsupported temporality preference '{pref}'. " + "Supported values: cumulative, delta, low_memory." + ) + + +def _map_histogram_aggregation( + pref: Optional[ExporterDefaultHistogramAggregation], +) -> Dict[type, Aggregation]: + """Map a histogram aggregation preference to an explicit preferred_aggregation dict. + + Always returns an explicit dict to suppress + OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION. + Default (None or explicit_bucket_histogram) → ExplicitBucketHistogramAggregation. 
+ """ + if ( + pref is None + or pref + == ExporterDefaultHistogramAggregation.explicit_bucket_histogram + ): + return {Histogram: ExplicitBucketHistogramAggregation()} + if ( + pref + == ExporterDefaultHistogramAggregation.base2_exponential_bucket_histogram + ): + return {Histogram: ExponentialBucketHistogramAggregation()} + raise ConfigurationError( + f"Unsupported default histogram aggregation '{pref}'. " + "Supported values: explicit_bucket_histogram, base2_exponential_bucket_histogram." + ) + + +def _create_aggregation(config: AggregationConfig) -> Aggregation: + """Create an SDK Aggregation from config, passing through detail parameters.""" + if config.default is not None: + return DefaultAggregation() + if config.drop is not None: + return DropAggregation() + if config.explicit_bucket_histogram is not None: + return ExplicitBucketHistogramAggregation( + boundaries=config.explicit_bucket_histogram.boundaries, + record_min_max=( + config.explicit_bucket_histogram.record_min_max + if config.explicit_bucket_histogram.record_min_max is not None + else True + ), + ) + if config.base2_exponential_bucket_histogram is not None: + kwargs = {} + if config.base2_exponential_bucket_histogram.max_size is not None: + kwargs["max_size"] = ( + config.base2_exponential_bucket_histogram.max_size + ) + if config.base2_exponential_bucket_histogram.max_scale is not None: + kwargs["max_scale"] = ( + config.base2_exponential_bucket_histogram.max_scale + ) + return ExponentialBucketHistogramAggregation(**kwargs) + if config.last_value is not None: + return LastValueAggregation() + if config.sum is not None: + return SumAggregation() + raise ConfigurationError( + f"Unknown or unsupported aggregation type in config: {config!r}. " + "Supported types: default, drop, explicit_bucket_histogram, " + "base2_exponential_bucket_histogram, last_value, sum." 
+ ) + + +def _create_view(config: ViewConfig) -> View: + """Create an SDK View from config.""" + selector = config.selector + stream = config.stream + + instrument_type = None + if selector.instrument_type is not None: + instrument_type = _INSTRUMENT_TYPE_MAP.get(selector.instrument_type) + if instrument_type is None: + raise ConfigurationError( + f"Unknown instrument type: {selector.instrument_type!r}" + ) + + attribute_keys: Optional[Set[str]] = None + if stream.attribute_keys is not None: + if stream.attribute_keys.excluded: + _logger.warning( + "attribute_keys.excluded is not supported by the Python SDK View; " + "the exclusion list will be ignored." + ) + if stream.attribute_keys.included is not None: + attribute_keys = set(stream.attribute_keys.included) + + aggregation = None + if stream.aggregation is not None: + aggregation = _create_aggregation(stream.aggregation) + + return View( + instrument_type=instrument_type, + instrument_name=selector.instrument_name, + meter_name=selector.meter_name, + meter_version=selector.meter_version, + meter_schema_url=selector.meter_schema_url, + instrument_unit=selector.unit, + name=stream.name, + description=stream.description, + attribute_keys=attribute_keys, + aggregation=aggregation, + ) + + +def _create_console_metric_exporter( + config: ConsoleMetricExporterConfig, +) -> MetricExporter: + """Create a ConsoleMetricExporter from config.""" + preferred_temporality = _map_temporality(config.temporality_preference) + preferred_aggregation = _map_histogram_aggregation( + config.default_histogram_aggregation + ) + return ConsoleMetricExporter( + preferred_temporality=preferred_temporality, + preferred_aggregation=preferred_aggregation, + ) + + +def _map_compression_metric( + value: Optional[str], compression_enum: type +) -> Optional[object]: + """Map a compression string to the given Compression enum value.""" + if value is None or value.lower() == "none": + return None + if value.lower() == "gzip": + return 
compression_enum.Gzip # type: ignore[attr-defined] + raise ConfigurationError( + f"Unsupported compression value '{value}'. Supported values: 'gzip', 'none'." + ) + + +def _create_otlp_http_metric_exporter( + config: OtlpHttpMetricExporterConfig, +) -> MetricExporter: + """Create an OTLP HTTP metric exporter from config.""" + try: + # pylint: disable=import-outside-toplevel,no-name-in-module + from opentelemetry.exporter.otlp.proto.http import ( # type: ignore[import-untyped] # noqa: PLC0415 + Compression, + ) + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( # type: ignore[import-untyped] # noqa: PLC0415 + OTLPMetricExporter, + ) + except ImportError as exc: + raise ConfigurationError( + "otlp_http metric exporter requires 'opentelemetry-exporter-otlp-proto-http'. " + "Install it with: pip install opentelemetry-exporter-otlp-proto-http" + ) from exc + + compression = _map_compression_metric(config.compression, Compression) + headers = _parse_headers(config.headers, config.headers_list) + timeout = (config.timeout / 1000.0) if config.timeout is not None else None + preferred_temporality = _map_temporality(config.temporality_preference) + preferred_aggregation = _map_histogram_aggregation( + config.default_histogram_aggregation + ) + + return OTLPMetricExporter( # type: ignore[return-value] + endpoint=config.endpoint, + headers=headers, + timeout=timeout, + compression=compression, # type: ignore[arg-type] + preferred_temporality=preferred_temporality, + preferred_aggregation=preferred_aggregation, + ) + + +def _create_otlp_grpc_metric_exporter( + config: OtlpGrpcMetricExporterConfig, +) -> MetricExporter: + """Create an OTLP gRPC metric exporter from config.""" + try: + # pylint: disable=import-outside-toplevel,no-name-in-module + import grpc # type: ignore[import-untyped] # noqa: PLC0415 + + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( # type: ignore[import-untyped] # noqa: PLC0415 + OTLPMetricExporter, + ) + except 
ImportError as exc: + raise ConfigurationError( + "otlp_grpc metric exporter requires 'opentelemetry-exporter-otlp-proto-grpc'. " + "Install it with: pip install opentelemetry-exporter-otlp-proto-grpc" + ) from exc + + compression = _map_compression_metric(config.compression, grpc.Compression) + headers = _parse_headers(config.headers, config.headers_list) + timeout = (config.timeout / 1000.0) if config.timeout is not None else None + preferred_temporality = _map_temporality(config.temporality_preference) + preferred_aggregation = _map_histogram_aggregation( + config.default_histogram_aggregation + ) + + return OTLPMetricExporter( # type: ignore[return-value] + endpoint=config.endpoint, + headers=headers, + timeout=timeout, + compression=compression, # type: ignore[arg-type] + preferred_temporality=preferred_temporality, + preferred_aggregation=preferred_aggregation, + ) + + +def _create_push_metric_exporter( + config: PushMetricExporterConfig, +) -> MetricExporter: + """Create a push metric exporter from config.""" + if config.console is not None: + return _create_console_metric_exporter(config.console) + if config.otlp_http is not None: + return _create_otlp_http_metric_exporter(config.otlp_http) + if config.otlp_grpc is not None: + return _create_otlp_grpc_metric_exporter(config.otlp_grpc) + if config.otlp_file_development is not None: + raise ConfigurationError( + "otlp_file_development metric exporter is experimental and not yet supported." + ) + raise ConfigurationError( + "No exporter type specified in push metric exporter config. " + "Supported types: console, otlp_http, otlp_grpc." + ) + + +def _create_periodic_metric_reader( + config: PeriodicMetricReaderConfig, +) -> PeriodicExportingMetricReader: + """Create a PeriodicExportingMetricReader from config. + + Passes explicit interval/timeout defaults to suppress env var reading. 
+ """ + exporter = _create_push_metric_exporter(config.exporter) + interval = ( + config.interval + if config.interval is not None + else _DEFAULT_EXPORT_INTERVAL_MILLIS + ) + timeout = ( + config.timeout + if config.timeout is not None + else _DEFAULT_EXPORT_TIMEOUT_MILLIS + ) + return PeriodicExportingMetricReader( + exporter=exporter, + export_interval_millis=float(interval), + export_timeout_millis=float(timeout), + ) + + +def _create_metric_reader(config: MetricReaderConfig) -> MetricReader: + """Create a MetricReader from config.""" + if config.periodic is not None: + return _create_periodic_metric_reader(config.periodic) + if config.pull is not None: + raise ConfigurationError( + "Pull metric readers (e.g. Prometheus) are experimental and not yet supported " + "by declarative config. Use the SDK API directly to configure pull readers." + ) + raise ConfigurationError( + "No reader type specified in metric reader config. " + "Supported types: periodic." + ) + + +def _create_exemplar_filter( + value: ExemplarFilterConfig, +) -> object: + """Create an SDK exemplar filter from config enum value.""" + if value == ExemplarFilterConfig.always_on: + return AlwaysOnExemplarFilter() + if value == ExemplarFilterConfig.always_off: + return AlwaysOffExemplarFilter() + if value == ExemplarFilterConfig.trace_based: + return TraceBasedExemplarFilter() + raise ConfigurationError( + f"Unknown exemplar filter value: {value!r}. " + "Supported values: always_on, always_off, trace_based." + ) + + +def create_meter_provider( + config: Optional[MeterProviderConfig], + resource: Optional[Resource] = None, +) -> MeterProvider: + """Create an SDK MeterProvider from declarative config. + + Does NOT read OTEL_METRIC_EXPORT_INTERVAL, OTEL_METRICS_EXEMPLAR_FILTER, + or any other env vars for values explicitly controlled by the config. + Absent config values use OTel spec defaults, matching Java SDK behavior. + + Args: + config: MeterProvider config from the parsed config file, or None. 
+ resource: Resource to attach to the provider. + + Returns: + A configured MeterProvider. + """ + # Always pass an explicit exemplar filter to suppress env var reading. + # Spec default is trace_based. + exemplar_filter: object = TraceBasedExemplarFilter() + if config is not None and config.exemplar_filter is not None: + exemplar_filter = _create_exemplar_filter(config.exemplar_filter) + + readers: List[MetricReader] = [] + views: List[View] = [] + + if config is not None: + for reader_config in config.readers: + readers.append(_create_metric_reader(reader_config)) + if config.views: + for view_config in config.views: + views.append(_create_view(view_config)) + + return MeterProvider( + resource=resource, + metric_readers=readers, + exemplar_filter=exemplar_filter, # type: ignore[arg-type] + views=views, + ) + + +def configure_meter_provider( + config: Optional[MeterProviderConfig], + resource: Optional[Resource] = None, +) -> None: + """Configure the global MeterProvider from declarative config. + + When config is None (meter_provider section absent from config file), + the global is not set — matching Java/JS SDK behavior. + + Args: + config: MeterProvider config from the parsed config file, or None. + resource: Resource to attach to the provider. + """ + if config is None: + return + metrics.set_meter_provider(create_meter_provider(config, resource)) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_propagator.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_propagator.py new file mode 100644 index 0000000000..1c49d05651 --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_propagator.py @@ -0,0 +1,125 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import logging +from typing import Optional + +from opentelemetry.baggage.propagation import W3CBaggagePropagator +from opentelemetry.propagate import set_global_textmap +from opentelemetry.propagators.composite import CompositePropagator +from opentelemetry.propagators.textmap import TextMapPropagator +from opentelemetry.sdk._configuration._exceptions import ConfigurationError +from opentelemetry.sdk._configuration.models import ( + Propagator as PropagatorConfig, +) +from opentelemetry.sdk._configuration.models import ( + TextMapPropagator as TextMapPropagatorConfig, +) +from opentelemetry.trace.propagation.tracecontext import ( + TraceContextTextMapPropagator, +) +from opentelemetry.util._importlib_metadata import entry_points + +_logger = logging.getLogger(__name__) + + +def _load_entry_point_propagator(name: str) -> TextMapPropagator: + """Load a propagator by name from the opentelemetry_propagator entry point group.""" + try: + eps = list(entry_points(group="opentelemetry_propagator", name=name)) + if not eps: + raise ConfigurationError( + f"Propagator '{name}' not found. " + "It may not be installed or may be misspelled." 
+ ) + return eps[0].load()() + except ConfigurationError: + raise + except Exception as exc: + raise ConfigurationError( + f"Failed to load propagator '{name}': {exc}" + ) from exc + + +def _propagators_from_textmap_config( + config: TextMapPropagatorConfig, +) -> list[TextMapPropagator]: + """Resolve a single TextMapPropagator config entry to a list of propagators.""" + result: list[TextMapPropagator] = [] + if config.tracecontext is not None: + result.append(TraceContextTextMapPropagator()) + if config.baggage is not None: + result.append(W3CBaggagePropagator()) + if config.b3 is not None: + result.append(_load_entry_point_propagator("b3")) + if config.b3multi is not None: + result.append(_load_entry_point_propagator("b3multi")) + return result + + +def create_propagator( + config: Optional[PropagatorConfig], +) -> CompositePropagator: + """Create a CompositePropagator from declarative config. + + If config is None or has no propagators defined, returns an empty + CompositePropagator (no-op), ensuring "what you see is what you get" + semantics — the env-var-based default propagators are not used. + + Args: + config: Propagator config from the parsed config file, or None. + + Returns: + A CompositePropagator wrapping all configured propagators. 
+ """ + if config is None: + return CompositePropagator([]) + + propagators: list[TextMapPropagator] = [] + seen_types: set[type] = set() + + def _add_deduped(propagator: TextMapPropagator) -> None: + if type(propagator) not in seen_types: + seen_types.add(type(propagator)) + propagators.append(propagator) + + # Process structured composite list + if config.composite: + for entry in config.composite: + for propagator in _propagators_from_textmap_config(entry): + _add_deduped(propagator) + + # Process composite_list (comma-separated propagator names via entry_points) + if config.composite_list: + for name in config.composite_list.split(","): + name = name.strip() + if not name or name.lower() == "none": + continue + _add_deduped(_load_entry_point_propagator(name)) + + return CompositePropagator(propagators) + + +def configure_propagator(config: Optional[PropagatorConfig]) -> None: + """Configure the global text map propagator from declarative config. + + Always calls set_global_textmap to override any defaults (including the + env-var-based tracecontext+baggage default set by the SDK). + + Args: + config: Propagator config from the parsed config file, or None. + """ + set_global_textmap(create_propagator(config)) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_resource.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_resource.py new file mode 100644 index 0000000000..d58bd4d31d --- /dev/null +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/_resource.py @@ -0,0 +1,184 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import fnmatch +import logging +from typing import Callable, Optional +from urllib import parse + +from opentelemetry.sdk._configuration.models import ( + AttributeNameValue, + AttributeType, + ExperimentalResourceDetector, + IncludeExclude, +) +from opentelemetry.sdk._configuration.models import Resource as ResourceConfig +from opentelemetry.sdk.resources import ( + _DEFAULT_RESOURCE, + SERVICE_NAME, + Resource, +) + +_logger = logging.getLogger(__name__) + + +def _coerce_bool(value: object) -> bool: + if isinstance(value, str): + return value.lower() not in ("false", "0", "") + return bool(value) + + +def _array(coerce: Callable) -> Callable: + return lambda value: [coerce(item) for item in value] + + +# Dispatch table mapping AttributeType to its coercion callable +_COERCIONS = { + AttributeType.string: str, + AttributeType.int: int, + AttributeType.double: float, + AttributeType.bool: _coerce_bool, + AttributeType.string_array: _array(str), + AttributeType.int_array: _array(int), + AttributeType.double_array: _array(float), + AttributeType.bool_array: _array(_coerce_bool), +} + + +def _coerce_attribute_value(attr: AttributeNameValue) -> object: + """Coerce an attribute value to the correct Python type based on AttributeType.""" + coerce = _COERCIONS.get(attr.type) # type: ignore[arg-type] + return coerce(attr.value) if coerce is not None else attr.value # type: ignore[operator] + + +def _parse_attributes_list(attributes_list: str) -> dict[str, str]: + """Parse a comma-separated key=value string into a dict. 
+ + Format is the same as OTEL_RESOURCE_ATTRIBUTES: key=value,key=value + Values are always strings (no type coercion). + """ + result: dict[str, str] = {} + for item in attributes_list.split(","): + item = item.strip() + if not item: + continue + if "=" not in item: + _logger.warning( + "Invalid resource attribute pair in attributes_list: %s", + item, + ) + continue + key, value = item.split("=", maxsplit=1) + result[key.strip()] = parse.unquote(value.strip()) + return result + + +def create_resource(config: Optional[ResourceConfig]) -> Resource: + """Create an SDK Resource from declarative config. + + Does NOT read OTEL_RESOURCE_ATTRIBUTES. Resource detectors are only run + when explicitly listed under detection_development.detectors in the config. + Starts from SDK telemetry defaults (telemetry.sdk.*), merges any detected + attributes, then merges explicit config attributes on top (highest priority). + + Args: + config: Resource config from the parsed config file, or None. + + Returns: + A Resource with SDK defaults, optional detector attributes, and any + config-specified attributes merged in priority order. + """ + # Spec requires service.name to always be present; detectors and explicit + # config attributes can override this default. + base = _DEFAULT_RESOURCE.merge(Resource({SERVICE_NAME: "unknown_service"})) + + if config is None: + return base + + # attributes_list is lower priority; explicit attributes overwrite conflicts. + config_attrs: dict[str, object] = {} + if config.attributes_list: + config_attrs.update(_parse_attributes_list(config.attributes_list)) + + if config.attributes: + for attr in config.attributes: + config_attrs[attr.name] = _coerce_attribute_value(attr) + + schema_url = config.schema_url + + # Run detectors only if detection_development is configured. Collect all + # detected attributes, apply the include/exclude filter, then merge before + # config attributes so explicit values always win. 
+ result = base + if config.detection_development: + detected_attrs: dict[str, object] = {} + if config.detection_development.detectors: + for detector_config in config.detection_development.detectors: + _run_detectors(detector_config, detected_attrs) + + filtered = _filter_attributes( + detected_attrs, config.detection_development.attributes + ) + if filtered: + result = result.merge(Resource(filtered)) # type: ignore[arg-type] + + config_resource = Resource(config_attrs, schema_url) # type: ignore[arg-type] + return result.merge(config_resource) + + +def _run_detectors( + detector_config: ExperimentalResourceDetector, + detected_attrs: dict[str, object], +) -> None: + """Run any detectors present in a single detector config entry. + + Each detector PR adds its own branch here. The detected_attrs dict + is updated in-place; later detectors overwrite earlier ones for the + same key. + """ + + +def _filter_attributes( + attrs: dict[str, object], filter_config: Optional[IncludeExclude] +) -> dict[str, object]: + """Filter detected attribute keys using include/exclude glob patterns. + + Mirrors other SDK IncludeExcludePredicate.createPatternMatching behaviour: + - No filter config (attributes absent) → include all detected attributes. + - included patterns are checked first; excluded patterns are applied after. + - An empty included list is treated as "include everything". 
+ """ + if filter_config is None: + return attrs + + included = filter_config.included + excluded = filter_config.excluded + + if not included and not excluded: + return attrs + + effective_included = included if included else None # [] → include all + + result: dict[str, object] = {} + for key, value in attrs.items(): + if effective_included is not None and not any( + fnmatch.fnmatch(key, pat) for pat in effective_included + ): + continue + if excluded and any(fnmatch.fnmatch(key, pat) for pat in excluded): + continue + result[key] = value + return result diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/__init__.py index a995539749..facabae4fd 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/__init__.py @@ -24,6 +24,15 @@ '1.0' """ +from opentelemetry.sdk._configuration._meter_provider import ( + configure_meter_provider, + create_meter_provider, +) +from opentelemetry.sdk._configuration._propagator import ( + configure_propagator, + create_propagator, +) +from opentelemetry.sdk._configuration._resource import create_resource from opentelemetry.sdk._configuration.file._env_substitution import ( EnvSubstitutionError, substitute_env_vars, @@ -38,4 +47,9 @@ "substitute_env_vars", "ConfigurationError", "EnvSubstitutionError", + "create_resource", + "create_propagator", + "configure_propagator", + "create_meter_provider", + "configure_meter_provider", ] diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/_loader.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/_loader.py index 6019166669..2649398170 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/_loader.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/file/_loader.py @@ -20,6 +20,7 @@ from pathlib import Path from typing import Any +from 
opentelemetry.sdk._configuration._exceptions import ConfigurationError from opentelemetry.sdk._configuration.file._env_substitution import ( substitute_env_vars, ) @@ -59,15 +60,8 @@ def _get_schema() -> dict: _logger = logging.getLogger(__name__) -class ConfigurationError(Exception): - """Raised when configuration file loading, parsing, or validation fails. - - This includes errors from: - - File not found or inaccessible - - Invalid YAML/JSON syntax - - Schema validation failures - - Environment variable substitution errors - """ +# Re-export for backwards compatibility +__all__ = ["ConfigurationError", "load_config_file"] def load_config_file(file_path: str) -> OpenTelemetryConfiguration: diff --git a/opentelemetry-sdk/tests/_configuration/test_meter_provider.py b/opentelemetry-sdk/tests/_configuration/test_meter_provider.py new file mode 100644 index 0000000000..04d60847f0 --- /dev/null +++ b/opentelemetry-sdk/tests/_configuration/test_meter_provider.py @@ -0,0 +1,640 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Tests access private members of SDK classes to assert correct configuration. 
+# pylint: disable=protected-access + +import os +import sys +import unittest +from unittest.mock import MagicMock, patch + +from opentelemetry.sdk._configuration._meter_provider import ( + configure_meter_provider, + create_meter_provider, +) +from opentelemetry.sdk._configuration.file._loader import ConfigurationError +from opentelemetry.sdk._configuration.models import ( + Aggregation as AggregationConfig, +) +from opentelemetry.sdk._configuration.models import ( + Base2ExponentialBucketHistogramAggregation as Base2Config, +) +from opentelemetry.sdk._configuration.models import ( + ConsoleMetricExporter as ConsoleMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + ExemplarFilter as ExemplarFilterConfig, +) +from opentelemetry.sdk._configuration.models import ( + ExplicitBucketHistogramAggregation as ExplicitBucketConfig, +) +from opentelemetry.sdk._configuration.models import ( + ExporterDefaultHistogramAggregation, + ExporterTemporalityPreference, + IncludeExclude, + InstrumentType, + ViewSelector, + ViewStream, +) +from opentelemetry.sdk._configuration.models import ( + MeterProvider as MeterProviderConfig, +) +from opentelemetry.sdk._configuration.models import ( + MetricReader as MetricReaderConfig, +) +from opentelemetry.sdk._configuration.models import ( + OtlpGrpcMetricExporter as OtlpGrpcMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + OtlpHttpMetricExporter as OtlpHttpMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + PeriodicMetricReader as PeriodicMetricReaderConfig, +) +from opentelemetry.sdk._configuration.models import ( + PushMetricExporter as PushMetricExporterConfig, +) +from opentelemetry.sdk._configuration.models import ( + View as ViewConfig, +) +from opentelemetry.sdk.metrics import ( + AlwaysOffExemplarFilter, + AlwaysOnExemplarFilter, + Counter, + Histogram, + MeterProvider, + ObservableCounter, + ObservableGauge, + ObservableUpDownCounter, + 
TraceBasedExemplarFilter, + UpDownCounter, +) +from opentelemetry.sdk.metrics.export import ( + AggregationTemporality, + ConsoleMetricExporter, + PeriodicExportingMetricReader, +) +from opentelemetry.sdk.metrics.view import ( + DefaultAggregation, + DropAggregation, + ExplicitBucketHistogramAggregation, + ExponentialBucketHistogramAggregation, + LastValueAggregation, + SumAggregation, + View, +) +from opentelemetry.sdk.resources import Resource + + +class TestCreateMeterProviderBasic(unittest.TestCase): + def test_none_config_returns_provider(self): + provider = create_meter_provider(None) + self.assertIsInstance(provider, MeterProvider) + + def test_none_config_uses_supplied_resource(self): + resource = Resource({"service.name": "svc"}) + provider = create_meter_provider(None, resource) + self.assertIs(provider._sdk_config.resource, resource) + + def test_none_config_no_readers(self): + provider = create_meter_provider(None) + self.assertEqual(len(provider._sdk_config.metric_readers), 0) + + def test_none_config_uses_trace_based_exemplar_filter(self): + provider = create_meter_provider(None) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, TraceBasedExemplarFilter + ) + + def test_none_config_does_not_read_exemplar_filter_env_var(self): + with patch.dict( + os.environ, {"OTEL_METRICS_EXEMPLAR_FILTER": "always_on"} + ): + provider = create_meter_provider(None) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, TraceBasedExemplarFilter + ) + + def test_none_config_does_not_read_interval_env_var(self): + config = MeterProviderConfig( + readers=[ + MetricReaderConfig( + periodic=PeriodicMetricReaderConfig( + exporter=PushMetricExporterConfig( + console=ConsoleMetricExporterConfig() + ) + ) + ) + ] + ) + with patch.dict(os.environ, {"OTEL_METRIC_EXPORT_INTERVAL": "999999"}): + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertIsInstance(reader, PeriodicExportingMetricReader) + 
self.assertEqual(reader._export_interval_millis, 60000.0) + + def test_configure_none_does_not_set_global(self): + original = __import__( + "opentelemetry.metrics", fromlist=["get_meter_provider"] + ).get_meter_provider() + configure_meter_provider(None) + after = __import__( + "opentelemetry.metrics", fromlist=["get_meter_provider"] + ).get_meter_provider() + self.assertIs(original, after) + + def test_configure_with_config_sets_global(self): + config = MeterProviderConfig(readers=[]) + with patch( + "opentelemetry.sdk._configuration._meter_provider.metrics.set_meter_provider" + ) as mock_set: + configure_meter_provider(config) + mock_set.assert_called_once() + arg = mock_set.call_args[0][0] + self.assertIsInstance(arg, MeterProvider) + + def test_empty_readers_list(self): + config = MeterProviderConfig(readers=[]) + provider = create_meter_provider(config) + self.assertEqual(len(provider._sdk_config.metric_readers), 0) + + +class TestCreateMetricReaders(unittest.TestCase): + @staticmethod + def _make_periodic_config(exporter_config, interval=None, timeout=None): + return MeterProviderConfig( + readers=[ + MetricReaderConfig( + periodic=PeriodicMetricReaderConfig( + exporter=exporter_config, + interval=interval, + timeout=timeout, + ) + ) + ] + ) + + def test_console_exporter(self): + config = self._make_periodic_config( + PushMetricExporterConfig(console=ConsoleMetricExporterConfig()) + ) + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertIsInstance(reader, PeriodicExportingMetricReader) + self.assertIsInstance(reader._exporter, ConsoleMetricExporter) + + def test_periodic_reader_default_interval(self): + config = self._make_periodic_config( + PushMetricExporterConfig(console=ConsoleMetricExporterConfig()) + ) + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertEqual(reader._export_interval_millis, 60000.0) + + def 
test_periodic_reader_default_timeout(self): + config = self._make_periodic_config( + PushMetricExporterConfig(console=ConsoleMetricExporterConfig()) + ) + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertEqual(reader._export_timeout_millis, 30000.0) + + def test_periodic_reader_explicit_interval(self): + config = self._make_periodic_config( + PushMetricExporterConfig(console=ConsoleMetricExporterConfig()), + interval=5000, + ) + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertEqual(reader._export_interval_millis, 5000.0) + + def test_periodic_reader_explicit_timeout(self): + config = self._make_periodic_config( + PushMetricExporterConfig(console=ConsoleMetricExporterConfig()), + timeout=10000, + ) + provider = create_meter_provider(config) + reader = provider._sdk_config.metric_readers[0] + self.assertEqual(reader._export_timeout_millis, 10000.0) + + def test_otlp_http_missing_package_raises(self): + config = self._make_periodic_config( + PushMetricExporterConfig(otlp_http=OtlpHttpMetricExporterConfig()) + ) + with patch.dict( + sys.modules, + { + "opentelemetry.exporter.otlp.proto.http.metric_exporter": None, + "opentelemetry.exporter.otlp.proto.http": None, + }, + ): + with self.assertRaises(ConfigurationError) as ctx: + create_meter_provider(config) + self.assertIn("otlp-proto-http", str(ctx.exception)) + + def test_otlp_http_created_with_endpoint(self): + mock_exporter_cls = MagicMock() + mock_compression_cls = MagicMock() + mock_http_module = MagicMock() + mock_http_module.Compression = mock_compression_cls + mock_module = MagicMock() + mock_module.OTLPMetricExporter = mock_exporter_cls + + with patch.dict( + sys.modules, + { + "opentelemetry.exporter.otlp.proto.http.metric_exporter": mock_module, + "opentelemetry.exporter.otlp.proto.http": mock_http_module, + }, + ): + config = self._make_periodic_config( + PushMetricExporterConfig( + 
otlp_http=OtlpHttpMetricExporterConfig( + endpoint="http://localhost:4318" + ) + ) + ) + create_meter_provider(config) + + _, kwargs = mock_exporter_cls.call_args + self.assertEqual(kwargs["endpoint"], "http://localhost:4318") + self.assertIsNone(kwargs["headers"]) + self.assertIsNone(kwargs["timeout"]) + self.assertIsNone(kwargs["compression"]) + + def test_otlp_grpc_missing_package_raises(self): + config = self._make_periodic_config( + PushMetricExporterConfig(otlp_grpc=OtlpGrpcMetricExporterConfig()) + ) + with patch.dict( + sys.modules, + { + "opentelemetry.exporter.otlp.proto.grpc.metric_exporter": None, + "grpc": None, + }, + ): + with self.assertRaises(ConfigurationError) as ctx: + create_meter_provider(config) + self.assertIn("otlp-proto-grpc", str(ctx.exception)) + + def test_pull_reader_raises(self): + config = MeterProviderConfig( + readers=[MetricReaderConfig(pull=MagicMock())] + ) + with self.assertRaises(ConfigurationError): + create_meter_provider(config) + + def test_no_reader_type_raises(self): + config = MeterProviderConfig(readers=[MetricReaderConfig()]) + with self.assertRaises(ConfigurationError): + create_meter_provider(config) + + def test_no_exporter_type_raises(self): + config = self._make_periodic_config(PushMetricExporterConfig()) + with self.assertRaises(ConfigurationError): + create_meter_provider(config) + + def test_multiple_readers(self): + config = MeterProviderConfig( + readers=[ + MetricReaderConfig( + periodic=PeriodicMetricReaderConfig( + exporter=PushMetricExporterConfig( + console=ConsoleMetricExporterConfig() + ) + ) + ), + MetricReaderConfig( + periodic=PeriodicMetricReaderConfig( + exporter=PushMetricExporterConfig( + console=ConsoleMetricExporterConfig() + ) + ) + ), + ] + ) + provider = create_meter_provider(config) + self.assertEqual(len(provider._sdk_config.metric_readers), 2) + + +class TestTemporalityAndAggregation(unittest.TestCase): + @staticmethod + def _make_console_config(temporality=None, histogram_agg=None): + 
return MeterProviderConfig( + readers=[ + MetricReaderConfig( + periodic=PeriodicMetricReaderConfig( + exporter=PushMetricExporterConfig( + console=ConsoleMetricExporterConfig( + temporality_preference=temporality, + default_histogram_aggregation=histogram_agg, + ) + ) + ) + ) + ] + ) + + @staticmethod + def _get_exporter(config): + provider = create_meter_provider(config) + return provider._sdk_config.metric_readers[0]._exporter + + def test_default_temporality_is_cumulative(self): + exporter = self._get_exporter(self._make_console_config()) + for instrument_type in ( + Counter, + UpDownCounter, + Histogram, + ObservableCounter, + ObservableGauge, + ObservableUpDownCounter, + ): + self.assertEqual( + exporter._preferred_temporality[instrument_type], + AggregationTemporality.CUMULATIVE, + ) + + def test_cumulative_temporality(self): + exporter = self._get_exporter( + self._make_console_config( + temporality=ExporterTemporalityPreference.cumulative + ) + ) + self.assertEqual( + exporter._preferred_temporality[Counter], + AggregationTemporality.CUMULATIVE, + ) + + def test_delta_temporality(self): + exporter = self._get_exporter( + self._make_console_config( + temporality=ExporterTemporalityPreference.delta + ) + ) + self.assertEqual( + exporter._preferred_temporality[Counter], + AggregationTemporality.DELTA, + ) + self.assertEqual( + exporter._preferred_temporality[Histogram], + AggregationTemporality.DELTA, + ) + self.assertEqual( + exporter._preferred_temporality[UpDownCounter], + AggregationTemporality.CUMULATIVE, + ) + self.assertEqual( + exporter._preferred_temporality[ObservableCounter], + AggregationTemporality.DELTA, + ) + + def test_low_memory_temporality(self): + exporter = self._get_exporter( + self._make_console_config( + temporality=ExporterTemporalityPreference.low_memory + ) + ) + self.assertEqual( + exporter._preferred_temporality[Counter], + AggregationTemporality.DELTA, + ) + self.assertEqual( + exporter._preferred_temporality[ObservableCounter], + 
AggregationTemporality.CUMULATIVE, + ) + + def test_default_histogram_aggregation_is_explicit(self): + exporter = self._get_exporter(self._make_console_config()) + self.assertIsInstance( + exporter._preferred_aggregation[Histogram], + ExplicitBucketHistogramAggregation, + ) + + def test_explicit_histogram_aggregation(self): + exporter = self._get_exporter( + self._make_console_config( + histogram_agg=ExporterDefaultHistogramAggregation.explicit_bucket_histogram + ) + ) + self.assertIsInstance( + exporter._preferred_aggregation[Histogram], + ExplicitBucketHistogramAggregation, + ) + + def test_base2_exponential_histogram_aggregation(self): + exporter = self._get_exporter( + self._make_console_config( + histogram_agg=ExporterDefaultHistogramAggregation.base2_exponential_bucket_histogram + ) + ) + self.assertIsInstance( + exporter._preferred_aggregation[Histogram], + ExponentialBucketHistogramAggregation, + ) + + def test_temporality_suppresses_env_var(self): + with patch.dict( + os.environ, + {"OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE": "DELTA"}, + ): + exporter = self._get_exporter(self._make_console_config()) + # Config has no preference → default cumulative, env var ignored + self.assertEqual( + exporter._preferred_temporality[Counter], + AggregationTemporality.CUMULATIVE, + ) + + +class TestCreateViews(unittest.TestCase): + @staticmethod + def _make_view_config(selector_kwargs=None, stream_kwargs=None): + selector = ViewSelector( + **(selector_kwargs or {"instrument_name": "*"}) + ) + stream = ViewStream(**(stream_kwargs or {})) + return MeterProviderConfig( + readers=[], + views=[ViewConfig(selector=selector, stream=stream)], + ) + + @staticmethod + def _get_view(config): + provider = create_meter_provider(config) + return provider._sdk_config.views[0] + + def test_view_created(self): + config = self._make_view_config() + provider = create_meter_provider(config) + self.assertEqual(len(provider._sdk_config.views), 1) + 
self.assertIsInstance(provider._sdk_config.views[0], View) + + def test_selector_instrument_name(self): + view = self._get_view( + self._make_view_config({"instrument_name": "my.metric"}) + ) + self.assertEqual(view._instrument_name, "my.metric") + + def test_selector_instrument_type(self): + view = self._get_view( + self._make_view_config({"instrument_type": InstrumentType.counter}) + ) + self.assertIs(view._instrument_type, Counter) + + def test_selector_meter_name(self): + view = self._get_view( + self._make_view_config({"meter_name": "my.meter"}) + ) + self.assertEqual(view._meter_name, "my.meter") + + def test_stream_name(self): + view = self._get_view( + self._make_view_config( + {"instrument_name": "my.metric"}, + stream_kwargs={"name": "renamed"}, + ) + ) + self.assertEqual(view._name, "renamed") + + def test_stream_description(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={"description": "a description"} + ) + ) + self.assertEqual(view._description, "a description") + + def test_stream_attribute_keys_included(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={ + "attribute_keys": IncludeExclude(included=["key1", "key2"]) + } + ) + ) + self.assertEqual(view._attribute_keys, {"key1", "key2"}) + + def test_stream_attribute_keys_excluded_logs_warning(self): + config = self._make_view_config( + stream_kwargs={"attribute_keys": IncludeExclude(excluded=["key1"])} + ) + with self.assertLogs( + "opentelemetry.sdk._configuration._meter_provider", level="WARNING" + ) as log: + create_meter_provider(config) + self.assertTrue(any("excluded" in msg for msg in log.output)) + + def test_stream_aggregation_drop(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={"aggregation": AggregationConfig(drop={})} + ) + ) + self.assertIsInstance(view._aggregation, DropAggregation) + + def test_stream_aggregation_explicit_bucket_histogram_with_boundaries( + self, + ): + view = self._get_view( + 
self._make_view_config( + stream_kwargs={ + "aggregation": AggregationConfig( + explicit_bucket_histogram=ExplicitBucketConfig( + boundaries=[1.0, 5.0, 10.0] + ) + ) + } + ) + ) + self.assertIsInstance( + view._aggregation, ExplicitBucketHistogramAggregation + ) + self.assertEqual(list(view._aggregation._boundaries), [1.0, 5.0, 10.0]) + + def test_stream_aggregation_base2_exponential_with_params(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={ + "aggregation": AggregationConfig( + base2_exponential_bucket_histogram=Base2Config( + max_size=64, max_scale=5 + ) + ) + } + ) + ) + self.assertIsInstance( + view._aggregation, ExponentialBucketHistogramAggregation + ) + + def test_stream_aggregation_last_value(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={"aggregation": AggregationConfig(last_value={})} + ) + ) + self.assertIsInstance(view._aggregation, LastValueAggregation) + + def test_stream_aggregation_sum(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={"aggregation": AggregationConfig(sum={})} + ) + ) + self.assertIsInstance(view._aggregation, SumAggregation) + + def test_stream_aggregation_default(self): + view = self._get_view( + self._make_view_config( + stream_kwargs={"aggregation": AggregationConfig(default={})} + ) + ) + self.assertIsInstance(view._aggregation, DefaultAggregation) + + +class TestExemplarFilter(unittest.TestCase): + @staticmethod + def _make_config(exemplar_filter): + return MeterProviderConfig(readers=[], exemplar_filter=exemplar_filter) + + def test_always_on(self): + provider = create_meter_provider( + self._make_config(ExemplarFilterConfig.always_on) + ) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, AlwaysOnExemplarFilter + ) + + def test_always_off(self): + provider = create_meter_provider( + self._make_config(ExemplarFilterConfig.always_off) + ) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, AlwaysOffExemplarFilter + ) + + 
def test_trace_based(self): + provider = create_meter_provider( + self._make_config(ExemplarFilterConfig.trace_based) + ) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, TraceBasedExemplarFilter + ) + + def test_absent_defaults_to_trace_based(self): + provider = create_meter_provider(MeterProviderConfig(readers=[])) + self.assertIsInstance( + provider._sdk_config.exemplar_filter, TraceBasedExemplarFilter + ) diff --git a/opentelemetry-sdk/tests/_configuration/test_propagator.py b/opentelemetry-sdk/tests/_configuration/test_propagator.py new file mode 100644 index 0000000000..e22acfca0f --- /dev/null +++ b/opentelemetry-sdk/tests/_configuration/test_propagator.py @@ -0,0 +1,259 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +from unittest.mock import MagicMock, patch + +# CompositePropagator stores its propagators in _propagators (private). +# We access it here to assert composition correctness. 
+# pylint: disable=protected-access +from opentelemetry.baggage.propagation import W3CBaggagePropagator +from opentelemetry.propagators.composite import CompositePropagator +from opentelemetry.sdk._configuration._propagator import ( + configure_propagator, + create_propagator, +) +from opentelemetry.sdk._configuration.file._loader import ConfigurationError +from opentelemetry.sdk._configuration.models import ( + Propagator as PropagatorConfig, +) +from opentelemetry.sdk._configuration.models import ( + TextMapPropagator as TextMapPropagatorConfig, +) +from opentelemetry.trace.propagation.tracecontext import ( + TraceContextTextMapPropagator, +) + + +class TestCreatePropagator(unittest.TestCase): + def test_none_config_returns_empty_composite(self): + result = create_propagator(None) + self.assertIsInstance(result, CompositePropagator) + self.assertEqual(result._propagators, []) # type: ignore[attr-defined] + + def test_empty_config_returns_empty_composite(self): + result = create_propagator(PropagatorConfig()) + self.assertIsInstance(result, CompositePropagator) + self.assertEqual(result._propagators, []) # type: ignore[attr-defined] + + def test_tracecontext_only(self): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(tracecontext={})] + ) + result = create_propagator(config) + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + self.assertIsInstance( + result._propagators[0], + TraceContextTextMapPropagator, # type: ignore[attr-defined] + ) + + def test_baggage_only(self): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(baggage={})] + ) + result = create_propagator(config) + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + self.assertIsInstance(result._propagators[0], W3CBaggagePropagator) # type: ignore[attr-defined] + + def test_tracecontext_and_baggage(self): + config = PropagatorConfig( + composite=[ + TextMapPropagatorConfig(tracecontext={}), + 
TextMapPropagatorConfig(baggage={}), + ] + ) + result = create_propagator(config) + self.assertEqual(len(result._propagators), 2) # type: ignore[attr-defined] + self.assertIsInstance( + result._propagators[0], + TraceContextTextMapPropagator, # type: ignore[attr-defined] + ) + self.assertIsInstance(result._propagators[1], W3CBaggagePropagator) # type: ignore[attr-defined] + + def test_b3_via_entry_point(self): + mock_propagator = MagicMock() + mock_ep = MagicMock() + mock_ep.load.return_value = lambda: mock_propagator + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(b3={})] + ) + result = create_propagator(config) + + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + self.assertIs(result._propagators[0], mock_propagator) # type: ignore[attr-defined] + + def test_b3multi_via_entry_point(self): + mock_propagator = MagicMock() + mock_ep = MagicMock() + mock_ep.load.return_value = lambda: mock_propagator + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(b3multi={})] + ) + result = create_propagator(config) + + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + + def test_b3_not_installed_raises_configuration_error(self): + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[], + ): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(b3={})] + ) + with self.assertRaises(ConfigurationError) as ctx: + create_propagator(config) + self.assertIn("b3", str(ctx.exception)) + + def test_composite_list_tracecontext(self): + config = PropagatorConfig(composite_list="tracecontext") + mock_tc = TraceContextTextMapPropagator() + mock_ep = MagicMock() + mock_ep.load.return_value = lambda: mock_tc + + with patch( + 
"opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + result = create_propagator(config) + + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + + def test_composite_list_multiple(self): + mock_tc = TraceContextTextMapPropagator() + mock_baggage = W3CBaggagePropagator() + mock_ep_tc = MagicMock() + mock_ep_tc.load.return_value = lambda: mock_tc + mock_ep_baggage = MagicMock() + mock_ep_baggage.load.return_value = lambda: mock_baggage + + def fake_entry_points(group, name): + if name == "tracecontext": + return [mock_ep_tc] + if name == "baggage": + return [mock_ep_baggage] + return [] + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + side_effect=fake_entry_points, + ): + config = PropagatorConfig(composite_list="tracecontext,baggage") + result = create_propagator(config) + + self.assertEqual(len(result._propagators), 2) # type: ignore[attr-defined] + + def test_composite_list_none_entry_skipped(self): + config = PropagatorConfig(composite_list="none") + result = create_propagator(config) + self.assertEqual(result._propagators, []) # type: ignore[attr-defined] + + def test_composite_list_empty_entries_skipped(self): + config = PropagatorConfig(composite_list=",, ,") + result = create_propagator(config) + self.assertEqual(result._propagators, []) # type: ignore[attr-defined] + + def test_composite_list_whitespace_around_names(self): + mock_tc = TraceContextTextMapPropagator() + mock_ep = MagicMock() + mock_ep.load.return_value = lambda: mock_tc + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + config = PropagatorConfig(composite_list=" tracecontext ") + result = create_propagator(config) + + self.assertEqual(len(result._propagators), 1) # type: ignore[attr-defined] + + def test_entry_point_load_exception_raises_configuration_error(self): + mock_ep = MagicMock() + mock_ep.load.side_effect = RuntimeError("package 
broken") + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + config = PropagatorConfig(composite_list="broken-prop") + with self.assertRaises(ConfigurationError) as ctx: + create_propagator(config) + self.assertIn("broken-prop", str(ctx.exception)) + + def test_deduplication_across_composite_and_composite_list(self): + """Same propagator type from both composite and composite_list is deduplicated.""" + mock_tc = TraceContextTextMapPropagator() + mock_ep = MagicMock() + mock_ep.load.return_value = lambda: mock_tc + + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[mock_ep], + ): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(tracecontext={})], + composite_list="tracecontext", + ) + result = create_propagator(config) + + # Only one TraceContextTextMapPropagator despite being in both + tc_count = sum( + 1 + for p in result._propagators # type: ignore[attr-defined] + if isinstance(p, TraceContextTextMapPropagator) + ) + self.assertEqual(tc_count, 1) + + def test_unknown_composite_list_propagator_raises(self): + with patch( + "opentelemetry.sdk._configuration._propagator.entry_points", + return_value=[], + ): + config = PropagatorConfig(composite_list="nonexistent") + with self.assertRaises(ConfigurationError): + create_propagator(config) + + +class TestConfigurePropagator(unittest.TestCase): + def test_configure_propagator_calls_set_global_textmap(self): + with patch( + "opentelemetry.sdk._configuration._propagator.set_global_textmap" + ) as mock_set: + configure_propagator(None) + mock_set.assert_called_once() + arg = mock_set.call_args[0][0] + self.assertIsInstance(arg, CompositePropagator) + + def test_configure_propagator_with_config(self): + config = PropagatorConfig( + composite=[TextMapPropagatorConfig(tracecontext={})] + ) + with patch( + "opentelemetry.sdk._configuration._propagator.set_global_textmap" + ) as mock_set: + 
configure_propagator(config) + mock_set.assert_called_once() + propagator = mock_set.call_args[0][0] + self.assertIsInstance(propagator, CompositePropagator) + self.assertEqual(len(propagator._propagators), 1) # type: ignore[attr-defined] diff --git a/opentelemetry-sdk/tests/_configuration/test_resource.py b/opentelemetry-sdk/tests/_configuration/test_resource.py new file mode 100644 index 0000000000..b50bc03fff --- /dev/null +++ b/opentelemetry-sdk/tests/_configuration/test_resource.py @@ -0,0 +1,297 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os
import unittest
from unittest.mock import patch

from opentelemetry.sdk._configuration._resource import create_resource
from opentelemetry.sdk._configuration.models import (
    AttributeNameValue,
    AttributeType,
)
from opentelemetry.sdk._configuration.models import Resource as ResourceConfig
from opentelemetry.sdk.resources import (
    SERVICE_NAME,
    TELEMETRY_SDK_LANGUAGE,
    TELEMETRY_SDK_NAME,
    TELEMETRY_SDK_VERSION,
    Resource,
)


def _single_attr_config(name, value, attr_type=None):
    """Build a ResourceConfig carrying exactly one attribute entry.

    ``type`` is only forwarded when given, so omitting it is
    indistinguishable from the original inline construction.
    """
    kwargs = {"name": name, "value": value}
    if attr_type is not None:
        kwargs["type"] = attr_type
    return ResourceConfig(attributes=[AttributeNameValue(**kwargs)])


class TestCreateResourceDefaults(unittest.TestCase):
    """Default behavior of create_resource: SDK defaults, env-var isolation, schema_url."""

    def test_none_config_returns_sdk_defaults(self):
        """A ``None`` config yields a Resource with the standard SDK attributes."""
        res = create_resource(None)
        self.assertIsInstance(res, Resource)
        self.assertEqual(res.attributes[TELEMETRY_SDK_LANGUAGE], "python")
        self.assertEqual(res.attributes[TELEMETRY_SDK_NAME], "opentelemetry")
        self.assertIn(TELEMETRY_SDK_VERSION, res.attributes)
        self.assertEqual(res.attributes[SERVICE_NAME], "unknown_service")

    def test_none_config_does_not_read_env_vars(self):
        """Declarative config must ignore OTEL_RESOURCE_ATTRIBUTES / OTEL_SERVICE_NAME."""
        env = {
            "OTEL_RESOURCE_ATTRIBUTES": "foo=bar",
            "OTEL_SERVICE_NAME": "my-service",
        }
        with patch.dict(os.environ, env):
            res = create_resource(None)
        self.assertNotIn("foo", res.attributes)
        self.assertEqual(res.attributes[SERVICE_NAME], "unknown_service")

    def test_empty_resource_config(self):
        """An empty ResourceConfig still produces the SDK defaults."""
        res = create_resource(ResourceConfig())
        self.assertEqual(res.attributes[TELEMETRY_SDK_LANGUAGE], "python")
        self.assertEqual(res.attributes[SERVICE_NAME], "unknown_service")

    def test_service_name_default_added_when_missing(self):
        """service.name falls back to 'unknown_service' when the config omits it."""
        res = create_resource(_single_attr_config("env", "staging"))
        self.assertEqual(res.attributes[SERVICE_NAME], "unknown_service")

    def test_service_name_not_overridden_when_set(self):
        """A config-supplied service.name wins over the fallback."""
        res = create_resource(_single_attr_config("service.name", "my-app"))
        self.assertEqual(res.attributes[SERVICE_NAME], "my-app")

    def test_env_vars_not_read(self):
        """OTEL_RESOURCE_ATTRIBUTES must not affect declarative config resource."""
        with patch.dict(
            os.environ,
            {"OTEL_RESOURCE_ATTRIBUTES": "injected=true"},
        ):
            res = create_resource(_single_attr_config("k", "v"))
        self.assertNotIn("injected", res.attributes)

    def test_schema_url(self):
        """schema_url from the config is carried onto the Resource."""
        cfg = ResourceConfig(
            schema_url="https://opentelemetry.io/schemas/1.24.0"
        )
        res = create_resource(cfg)
        self.assertEqual(
            res.schema_url, "https://opentelemetry.io/schemas/1.24.0"
        )

    def test_schema_url_none(self):
        """An unset schema_url maps to the empty string."""
        res = create_resource(ResourceConfig())
        self.assertEqual(res.schema_url, "")


class TestCreateResourceAttributes(unittest.TestCase):
    """Attribute handling: plain values and every AttributeType coercion."""

    def test_attributes_plain(self):
        """Untyped attributes pass through; SDK defaults remain present."""
        cfg = ResourceConfig(
            attributes=[
                AttributeNameValue(name="service.name", value="my-service"),
                AttributeNameValue(name="env", value="production"),
            ]
        )
        res = create_resource(cfg)
        self.assertEqual(res.attributes["service.name"], "my-service")
        self.assertEqual(res.attributes["env"], "production")
        # SDK defaults still present
        self.assertEqual(res.attributes[TELEMETRY_SDK_LANGUAGE], "python")

    def test_attribute_type_string(self):
        """type=string coerces a numeric value to str."""
        cfg = _single_attr_config("k", 42, AttributeType.string)
        res = create_resource(cfg)
        self.assertEqual(res.attributes["k"], "42")
        self.assertIsInstance(res.attributes["k"], str)

    def test_attribute_type_int(self):
        """type=int coerces a float value to int."""
        cfg = _single_attr_config("k", 3.0, AttributeType.int)
        res = create_resource(cfg)
        self.assertEqual(res.attributes["k"], 3)
        self.assertIsInstance(res.attributes["k"], int)

    def test_attribute_type_double(self):
        """type=double coerces a string value to float."""
        cfg = _single_attr_config("k", "1.5", AttributeType.double)
        res = create_resource(cfg)
        self.assertAlmostEqual(res.attributes["k"], 1.5)  # type: ignore[arg-type]
        self.assertIsInstance(res.attributes["k"], float)

    def test_attribute_type_bool(self):
        """type=bool: the string 'true' becomes a truthy value."""
        cfg = _single_attr_config("k", "true", AttributeType.bool)
        res = create_resource(cfg)
        self.assertTrue(res.attributes["k"])

    def test_attribute_type_bool_false_string(self):
        """type=bool: the string 'false' must coerce to False, not bool('false')."""
        cfg = _single_attr_config("k", "false", AttributeType.bool)
        res = create_resource(cfg)
        self.assertFalse(res.attributes["k"])

    def test_attribute_type_string_array(self):
        """type=string_array keeps string elements as-is."""
        cfg = _single_attr_config("k", ["a", "b"], AttributeType.string_array)
        res = create_resource(cfg)
        self.assertEqual(list(res.attributes["k"]), ["a", "b"])  # type: ignore[arg-type]

    def test_attribute_type_int_array(self):
        """type=int_array coerces each float element to int."""
        cfg = _single_attr_config("k", [1.0, 2.0], AttributeType.int_array)
        res = create_resource(cfg)
        self.assertEqual(list(res.attributes["k"]), [1, 2])  # type: ignore[arg-type]

    def test_attribute_type_double_array(self):
        """type=double_array coerces each int element to float."""
        cfg = _single_attr_config("k", [1, 2], AttributeType.double_array)
        res = create_resource(cfg)
        self.assertEqual(list(res.attributes["k"]), [1.0, 2.0])  # type: ignore[arg-type]

    def test_attribute_type_bool_array(self):
        """type=bool_array passes real booleans through unchanged."""
        cfg = _single_attr_config("k", [True, False], AttributeType.bool_array)
        res = create_resource(cfg)
        self.assertEqual(list(res.attributes["k"]), [True, False])  # type: ignore[arg-type]

    def test_attribute_type_bool_array_string_values(self):
        """bool_array must use _coerce_bool, not plain bool() — 'false' must be False."""
        cfg = _single_attr_config(
            "k", ["true", "false"], AttributeType.bool_array
        )
        res = create_resource(cfg)
        self.assertEqual(list(res.attributes["k"]), [True, False])  # type: ignore[arg-type]


class TestCreateResourceAttributesList(unittest.TestCase):
    """Parsing of the W3C-Baggage-style ``attributes_list`` string."""

    def test_attributes_list_parsed(self):
        """Comma-separated key=value pairs are parsed into attributes."""
        cfg = ResourceConfig(
            attributes_list="service.name=my-svc,region=us-east-1"
        )
        res = create_resource(cfg)
        self.assertEqual(res.attributes["service.name"], "my-svc")
        self.assertEqual(res.attributes["region"], "us-east-1")

    def test_attributes_list_does_not_override_attributes(self):
        """Explicit attributes take precedence over attributes_list."""
        cfg = ResourceConfig(
            attributes=[
                AttributeNameValue(name="service.name", value="explicit")
            ],
            attributes_list="service.name=from-list,extra=val",
        )
        res = create_resource(cfg)
        self.assertEqual(res.attributes["service.name"], "explicit")
        self.assertEqual(res.attributes["extra"], "val")

    def test_attributes_list_only_includes_sdk_defaults(self):
        """attributes_list alone should still include telemetry.sdk.* defaults."""
        res = create_resource(ResourceConfig(attributes_list="env=prod"))
        self.assertEqual(res.attributes["env"], "prod")
        self.assertEqual(res.attributes[TELEMETRY_SDK_LANGUAGE], "python")

    def test_attributes_list_value_containing_equals(self):
        """Values containing '=' should be preserved intact."""
        res = create_resource(
            ResourceConfig(attributes_list="token=abc=def=ghi")
        )
        self.assertEqual(res.attributes["token"], "abc=def=ghi")

    def test_attributes_list_empty_pairs_skipped(self):
        """Empty segments between commas are ignored."""
        res = create_resource(ResourceConfig(attributes_list=",foo=bar,,"))
        self.assertEqual(res.attributes["foo"], "bar")

    def test_attributes_list_url_decoded(self):
        """Percent-encoded values are URL-decoded."""
        cfg = ResourceConfig(
            attributes_list="service.namespace=my%20namespace,region=us-east-1"
        )
        res = create_resource(cfg)
        self.assertEqual(
            res.attributes["service.namespace"], "my namespace"
        )

    def test_attributes_list_invalid_pair_skipped(self):
        """A segment without '=' is dropped with a warning, not fatal."""
        with self.assertLogs(
            "opentelemetry.sdk._configuration._resource", level="WARNING"
        ) as cm:
            cfg = ResourceConfig(attributes_list="no-equals,foo=bar")
            res = create_resource(cfg)
        self.assertEqual(res.attributes["foo"], "bar")
        self.assertNotIn("no-equals", res.attributes)
        self.assertTrue(any("no-equals" in msg for msg in cm.output))