Skip to content
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
([#4996](https://github.com/open-telemetry/opentelemetry-python/pull/4996))
- `opentelemetry-exporter-otlp-proto-grpc`: make retryable gRPC error codes configurable for gRPC exporters
([#4917](https://github.com/open-telemetry/opentelemetry-python/pull/4917))
- [BREAKING] `opentelemetry-sdk`, `opentelemetry-exporter-otlp-proto-common`: Add support for metric data-point flags
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

  • New feature (non-breaking change which adds functionality)

Is this breaking?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This might be overkill to include in the changelog, but if you look closely at the PR, I did reorder the fields and change the type of the flags field in ExponentialHistogramDataPoint, so it is technically a breaking change — though only to ExponentialHistogramDataPoint.

([#4916](https://github.com/open-telemetry/opentelemetry-python/pull/4916))

## Version 1.41.0/0.62b0 (2026-04-09)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -240,6 +240,7 @@ def _encode_metric(metric, pb2_metric):
attributes=_encode_attributes(data_point.attributes),
time_unix_nano=data_point.time_unix_nano,
exemplars=_encode_exemplars(data_point.exemplars),
flags=int(data_point.flags),
)
if isinstance(data_point.value, int):
pt.as_int = data_point.value
Expand All @@ -260,6 +261,7 @@ def _encode_metric(metric, pb2_metric):
explicit_bounds=data_point.explicit_bounds,
max=data_point.max,
min=data_point.min,
flags=int(data_point.flags),
)
pb2_metric.histogram.aggregation_temporality = (
metric.data.aggregation_temporality
Expand All @@ -273,6 +275,7 @@ def _encode_metric(metric, pb2_metric):
start_time_unix_nano=data_point.start_time_unix_nano,
time_unix_nano=data_point.time_unix_nano,
exemplars=_encode_exemplars(data_point.exemplars),
flags=int(data_point.flags),
)
if isinstance(data_point.value, int):
pt.as_int = data_point.value
Expand Down Expand Up @@ -316,9 +319,9 @@ def _encode_metric(metric, pb2_metric):
zero_count=data_point.zero_count,
positive=positive,
negative=negative,
flags=data_point.flags,
max=data_point.max,
min=data_point.min,
flags=int(data_point.flags),
)
pb2_metric.exponential_histogram.aggregation_temporality = (
metric.data.aggregation_temporality
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from opentelemetry.sdk.metrics.export import (
AggregationTemporality,
Buckets,
DataPointFlags,
ExponentialHistogramDataPoint,
HistogramDataPoint,
Metric,
Expand Down Expand Up @@ -80,6 +81,7 @@ class TestOTLPMetricsEncoder(unittest.TestCase):
explicit_bounds=[10.0, 20.0],
min=8,
max=18,
flags=DataPointFlags.get_default(),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand All @@ -101,7 +103,13 @@ def test_encode_sum_int(self):
version="first_version",
schema_url="instrumentation_scope_schema_url",
),
metrics=[_generate_sum("sum_int", 33)],
metrics=[
_generate_sum(
"sum_int",
33,
flags=DataPointFlags.get_default(),
)
],
schema_url="instrumentation_scope_schema_url",
)
],
Expand Down Expand Up @@ -152,6 +160,9 @@ def test_encode_sum_int(self):
start_time_unix_nano=1641946015139533244,
time_unix_nano=1641946016139533244,
as_int=33,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.CUMULATIVE,
Expand Down Expand Up @@ -504,6 +515,9 @@ def test_encode_histogram(self):
],
max=18.0,
min=8.0,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand Down Expand Up @@ -635,6 +649,9 @@ def test_encode_multiple_scope_histogram(self):
],
max=18.0,
min=8.0,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand Down Expand Up @@ -697,6 +714,9 @@ def test_encode_multiple_scope_histogram(self):
],
max=18.0,
min=8.0,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand Down Expand Up @@ -767,6 +787,9 @@ def test_encode_multiple_scope_histogram(self):
],
max=18.0,
min=8.0,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand Down Expand Up @@ -837,6 +860,9 @@ def test_encode_multiple_scope_histogram(self):
],
max=18.0,
min=8.0,
flags=int(
DataPointFlags.get_default()
),
)
],
aggregation_temporality=AggregationTemporality.DELTA,
Expand Down Expand Up @@ -868,7 +894,7 @@ def test_encode_exponential_histogram(self):
zero_count=5,
positive=Buckets(offset=6, bucket_counts=[7, 8]),
negative=Buckets(offset=9, bucket_counts=[10, 11]),
flags=12,
flags=DataPointFlags.get_default(),
Comment thread
herin049 marked this conversation as resolved.
min=13.0,
max=14.0,
)
Expand Down Expand Up @@ -953,7 +979,9 @@ def test_encode_exponential_histogram(self):
offset=9,
bucket_counts=[10, 11],
),
flags=12,
flags=int(
DataPointFlags.get_default()
),
exemplars=[],
min=13.0,
max=14.0,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
from opentelemetry.sdk.metrics._internal.measurement import Measurement
from opentelemetry.sdk.metrics._internal.point import Buckets as BucketsPoint
from opentelemetry.sdk.metrics._internal.point import (
DataPointFlags,
ExponentialHistogramDataPoint,
HistogramDataPoint,
NumberDataPoint,
Expand Down Expand Up @@ -873,10 +874,9 @@ def collect(
offset=value_negative.offset,
bucket_counts=(value_negative.get_offset_counts()),
),
# FIXME: Find the right value for flags
flags=0,
min=min_,
max=max_,
flags=DataPointFlags.get_default(),
)

# Here collection_temporality is CUMULATIVE.
Expand Down Expand Up @@ -1049,10 +1049,9 @@ def collect(
self._previous_value_negative.get_offset_counts()
),
),
# FIXME: Find the right value for flags
flags=0,
min=self._previous_min,
max=self._previous_max,
flags=DataPointFlags.get_default(),
)

return None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,36 @@
from opentelemetry.util.types import Attributes


class DataPointFlags(int):
    """Bitmask carrying per-data-point options, as defined by OTLP.

    Exactly one option is currently defined: the "no recorded value" bit
    (``0x01``). When set, it marks a point in a series as explicitly
    missing — the timeseries that previously existed was removed, and
    queries issued after this marker SHOULD NOT return it. This mirrors
    the Prometheus staleness marker.

    When that bit is set, consumers SHOULD ignore every data-point field
    other than attributes, time stamps, and time windows.

    See the `OpenTelemetry Data Point Flags`_ spec for details.

    .. _OpenTelemetry Data Point Flags:
        https://opentelemetry.io/docs/specs/otel/metrics/data-model/#data-point-flags
    """

    # Bit values for the mask; kept as plain ints so instances serialize
    # and format exactly like integers.
    DEFAULT = 0x00
    NO_RECORDED_VALUE = 0x01

    @classmethod
    def get_default(cls) -> "DataPointFlags":
        """Return a flags value with no options set."""
        return cls(cls.DEFAULT)

    @property
    def no_recorded_value(self) -> bool:
        """Whether the "no recorded value" bit is set on this mask."""
        return (self & self.NO_RECORDED_VALUE) != 0


@dataclass(frozen=True)
class NumberDataPoint:
"""Single data point in a timeseries that describes the time-varying scalar
Expand All @@ -26,6 +56,7 @@ class NumberDataPoint:
time_unix_nano: int
value: int | float
exemplars: Sequence[Exemplar] = field(default_factory=list)
flags: DataPointFlags = DataPointFlags.get_default()

def to_json(self, indent: int | None = 4) -> str:
return dumps(asdict(self), indent=indent)
Expand All @@ -47,6 +78,7 @@ class HistogramDataPoint:
min: float
max: float
exemplars: Sequence[Exemplar] = field(default_factory=list)
flags: DataPointFlags = DataPointFlags.get_default()

def to_json(self, indent: int | None = 4) -> str:
return dumps(asdict(self), indent=indent)
Expand Down Expand Up @@ -74,10 +106,10 @@ class ExponentialHistogramDataPoint:
zero_count: int
positive: Buckets
negative: Buckets
flags: int
min: float
max: float
exemplars: Sequence[Exemplar] = field(default_factory=list)
flags: DataPointFlags = DataPointFlags.get_default()

def to_json(self, indent: int | None = 4) -> str:
return dumps(asdict(self), indent=indent)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
# The point module is not in the export directory to avoid a circular import.
from opentelemetry.sdk.metrics._internal.point import ( # noqa: F401
Buckets,
DataPointFlags,
DataPointT,
DataT,
ExponentialHistogram,
Expand All @@ -41,6 +42,7 @@
"MetricExportResult",
"MetricReader",
"PeriodicExportingMetricReader",
"DataPointFlags",
"DataPointT",
"DataT",
"ExponentialHistogram",
Expand Down
35 changes: 29 additions & 6 deletions opentelemetry-sdk/tests/metrics/test_point.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from opentelemetry.sdk.metrics.export import (
AggregationTemporality,
Buckets,
DataPointFlags,
ExponentialHistogram,
ExponentialHistogramDataPoint,
Gauge,
Expand Down Expand Up @@ -55,15 +56,16 @@ def setUpClass(cls):
time_unix_nano=2,
value=3.3,
)
cls.number_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "value": 3.3, "exemplars": []}}'
cls.number_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "value": 3.3, "exemplars": [], "flags": {DataPointFlags.get_default()}}}'

cls.number_data_point_1 = NumberDataPoint(
attributes=cls.attributes_1,
start_time_unix_nano=2,
time_unix_nano=3,
value=4.4,
flags=DataPointFlags(DataPointFlags.NO_RECORDED_VALUE),
)
cls.number_data_point_1_str = f'{{"attributes": {cls.attributes_1_str}, "start_time_unix_nano": 2, "time_unix_nano": 3, "value": 4.4, "exemplars": []}}'
cls.number_data_point_1_str = f'{{"attributes": {cls.attributes_1_str}, "start_time_unix_nano": 2, "time_unix_nano": 3, "value": 4.4, "exemplars": [], "flags": {DataPointFlags.NO_RECORDED_VALUE}}}'

cls.histogram_data_point_0 = HistogramDataPoint(
attributes=cls.attributes_0,
Expand All @@ -76,7 +78,7 @@ def setUpClass(cls):
min=0.2,
max=3.3,
)
cls.histogram_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "count": 3, "sum": 3.3, "bucket_counts": [1, 1, 1], "explicit_bounds": [0.1, 1.2, 2.3, 3.4], "min": 0.2, "max": 3.3, "exemplars": []}}'
cls.histogram_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "count": 3, "sum": 3.3, "bucket_counts": [1, 1, 1], "explicit_bounds": [0.1, 1.2, 2.3, 3.4], "min": 0.2, "max": 3.3, "exemplars": [], "flags": {DataPointFlags.get_default()}}}'

cls.histogram_data_point_1 = HistogramDataPoint(
attributes=cls.attributes_1,
Expand All @@ -88,8 +90,9 @@ def setUpClass(cls):
explicit_bounds=[1.2, 2.3, 3.4, 4.5],
min=0.3,
max=4.4,
flags=DataPointFlags(DataPointFlags.NO_RECORDED_VALUE),
)
cls.histogram_data_point_1_str = f'{{"attributes": {cls.attributes_1_str}, "start_time_unix_nano": 2, "time_unix_nano": 3, "count": 4, "sum": 4.4, "bucket_counts": [2, 1, 1], "explicit_bounds": [1.2, 2.3, 3.4, 4.5], "min": 0.3, "max": 4.4, "exemplars": []}}'
cls.histogram_data_point_1_str = f'{{"attributes": {cls.attributes_1_str}, "start_time_unix_nano": 2, "time_unix_nano": 3, "count": 4, "sum": 4.4, "bucket_counts": [2, 1, 1], "explicit_bounds": [1.2, 2.3, 3.4, 4.5], "min": 0.3, "max": 4.4, "exemplars": [], "flags": {DataPointFlags.NO_RECORDED_VALUE}}}'

cls.exp_histogram_data_point_0 = ExponentialHistogramDataPoint(
attributes=cls.attributes_0,
Expand All @@ -101,11 +104,27 @@ def setUpClass(cls):
zero_count=0,
positive=Buckets(offset=0, bucket_counts=[1]),
negative=Buckets(offset=0, bucket_counts=[0]),
flags=0,
min=10,
max=10,
flags=DataPointFlags.get_default(),
)
cls.exp_histogram_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "count": 1, "sum": 10, "scale": 1, "zero_count": 0, "positive": {{"offset": 0, "bucket_counts": [1]}}, "negative": {{"offset": 0, "bucket_counts": [0]}}, "flags": 0, "min": 10, "max": 10, "exemplars": []}}'
cls.exp_histogram_data_point_0_str = f'{{"attributes": {cls.attributes_0_str}, "start_time_unix_nano": 1, "time_unix_nano": 2, "count": 1, "sum": 10, "scale": 1, "zero_count": 0, "positive": {{"offset": 0, "bucket_counts": [1]}}, "negative": {{"offset": 0, "bucket_counts": [0]}}, "min": 10, "max": 10, "exemplars": [], "flags": {DataPointFlags.get_default()}}}'

cls.exp_histogram_data_point_1 = ExponentialHistogramDataPoint(
attributes=cls.attributes_1,
start_time_unix_nano=2,
time_unix_nano=3,
count=2,
sum=20,
scale=2,
zero_count=1,
positive=Buckets(offset=0, bucket_counts=[1]),
negative=Buckets(offset=0, bucket_counts=[1]),
min=10,
max=20,
flags=DataPointFlags(DataPointFlags.NO_RECORDED_VALUE),
)
cls.exp_histogram_data_point_1_str = f'{{"attributes": {cls.attributes_1_str}, "start_time_unix_nano": 2, "time_unix_nano": 3, "count": 2, "sum": 20, "scale": 2, "zero_count": 1, "positive": {{"offset": 0, "bucket_counts": [1]}}, "negative": {{"offset": 0, "bucket_counts": [1]}}, "min": 10, "max": 20, "exemplars": [], "flags": {DataPointFlags.NO_RECORDED_VALUE}}}'

cls.sum_0 = Sum(
data_points=[cls.number_data_point_0, cls.number_data_point_1],
Expand Down Expand Up @@ -227,6 +246,10 @@ def test_exp_histogram_data_point(self):
self.exp_histogram_data_point_0.to_json(indent=None),
self.exp_histogram_data_point_0_str,
)
self.assertEqual(
self.exp_histogram_data_point_1.to_json(indent=None),
self.exp_histogram_data_point_1_str,
)

def test_sum(self):
self.assertEqual(self.sum_0.to_json(indent=None), self.sum_0_str)
Expand Down
Loading
Loading