opentelemetry_sdk/metrics/aggregation.rs

use std::fmt;

use crate::metrics::internal::{EXPO_MAX_SCALE, EXPO_MIN_SCALE};
use opentelemetry::metrics::{MetricsError, Result};

/// The way recorded measurements are summarized.
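///
/// # Example
///
/// A minimal construction sketch; the import path assumes the
/// `opentelemetry_sdk::metrics` re-export exercised by the tests in this
/// module.
///
/// ```
/// use opentelemetry_sdk::metrics::Aggregation;
///
/// // A base2 exponential histogram using the same limits as the tests below.
/// let aggregation = Aggregation::Base2ExponentialHistogram {
///     max_size: 160,
///     max_scale: 20,
///     record_min_max: true,
/// };
/// assert!(aggregation.validate().is_ok());
/// ```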
#[derive(Clone, Debug, PartialEq)]
#[non_exhaustive]
pub enum Aggregation {
    /// An aggregation that drops all recorded data.
    Drop,

    /// An aggregation that uses the default instrument kind selection mapping to
    /// select another aggregation.
    ///
    /// A metric reader can be configured to make an aggregation selection based on
    /// instrument kind that differs from the default. This aggregation ensures the
    /// default is used.
    ///
    /// See the [DefaultAggregationSelector] for information about the default
    /// instrument kind selection mapping.
    ///
    /// [DefaultAggregationSelector]: crate::metrics::reader::DefaultAggregationSelector
    Default,

    /// An aggregation that summarizes a set of measurements as their arithmetic
    /// sum.
    Sum,

    /// An aggregation that summarizes a set of measurements as the last one made.
    LastValue,

    /// An aggregation that summarizes a set of measurements as a histogram with
    /// explicitly defined buckets.
    ExplicitBucketHistogram {
36        /// The increasing bucket boundary values.
37        ///
38        /// Boundary values define bucket upper bounds. Buckets are exclusive of their
39        /// lower boundary and inclusive of their upper bound (except at positive
40        /// infinity). A measurement is defined to fall into the greatest-numbered
41        /// bucket with a boundary that is greater than or equal to the measurement. As
42        /// an example, boundaries defined as:
43        ///
44        /// vec![0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0,
45        /// 1000.0, 2500.0, 5000.0, 7500.0, 10000.0];
46        ///
47        /// Will define these buckets:
48        ///
49        /// (-∞, 0], (0, 5.0], (5.0, 10.0], (10.0, 25.0], (25.0, 50.0], (50.0,
50        ///  75.0], (75.0, 100.0], (100.0, 250.0], (250.0, 500.0], (500.0,
51        ///  750.0], (750.0, 1000.0], (1000.0, 2500.0], (2500.0, 5000.0],
52        ///  (5000.0, 7500.0], (7500.0, 10000.0], (10000.0, +∞)
53        boundaries: Vec<f64>,

        /// Indicates whether to record the min and max of the distribution.
        ///
        /// By default, these values are recorded.
        ///
        /// Recording these values for cumulative data is expected to have little
        /// value; they will represent the entire life of the instrument instead of
        /// just the current collection cycle. It is recommended to set this to
        /// `false` for that type of data to avoid computing the low-value
        /// instances.
        record_min_max: bool,
    },

    /// An aggregation that summarizes a set of measurements as a histogram with
    /// bucket widths that grow exponentially.
    Base2ExponentialHistogram {
        /// The maximum number of buckets to use for the histogram.
        max_size: u32,

        /// The maximum resolution scale to use for the histogram.
        ///
        /// The maximum value is `20`; at this scale the histogram can use the
        /// maximum number of buckets that fit within the range of a signed
        /// 32-bit integer index.
        ///
        /// The minimum value is `-10`, in which case only two buckets will be used.
        max_scale: i8,

        /// Indicates whether to record the min and max of the distribution.
        ///
        /// By default, these values are recorded.
        ///
        /// It is generally not valuable to record min and max for cumulative data
        /// as they will represent the entire life of the instrument instead of just
        /// the current collection cycle. You can opt out by setting this value to
        /// `false`.
        record_min_max: bool,
    },
}

impl fmt::Display for Aggregation {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // used for stream id comparisons
        let name = match self {
            Aggregation::Drop => "Drop",
            Aggregation::Default => "Default",
            Aggregation::Sum => "Sum",
            Aggregation::LastValue => "LastValue",
            Aggregation::ExplicitBucketHistogram { .. } => "ExplicitBucketHistogram",
            Aggregation::Base2ExponentialHistogram { .. } => "Base2ExponentialHistogram",
        };

        f.write_str(name)
    }
}

impl Aggregation {
    /// Validate that this aggregation has a correct configuration.
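    ///
    /// # Example
    ///
    /// A small sketch (the import path assumes the crate-level
    /// `opentelemetry_sdk::metrics` re-export) showing that explicit bucket
    /// boundaries must be strictly increasing:
    ///
    /// ```
    /// use opentelemetry_sdk::metrics::Aggregation;
    ///
    /// let out_of_order = Aggregation::ExplicitBucketHistogram {
    ///     boundaries: vec![10.0, 5.0], // not monotonically increasing
    ///     record_min_max: true,
    /// };
    /// assert!(out_of_order.validate().is_err());
    ///
    /// let sorted = Aggregation::ExplicitBucketHistogram {
    ///     boundaries: vec![5.0, 10.0],
    ///     record_min_max: true,
    /// };
    /// assert!(sorted.validate().is_ok());
    /// ```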
    pub fn validate(&self) -> Result<()> {
        match self {
            Aggregation::Drop => Ok(()),
            Aggregation::Default => Ok(()),
            Aggregation::Sum => Ok(()),
            Aggregation::LastValue => Ok(()),
            Aggregation::ExplicitBucketHistogram { boundaries, .. } => {
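                // Boundaries must be sorted in strictly increasing order;
                // compare each adjacent pair to detect violations.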
                for x in boundaries.windows(2) {
                    if x[0] >= x[1] {
                        return Err(MetricsError::Config(format!(
                            "aggregation: explicit bucket histogram: non-monotonic boundaries: {:?}",
                            boundaries,
                        )));
                    }
                }

                Ok(())
            }
            Aggregation::Base2ExponentialHistogram { max_scale, .. } => {
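                // max_scale must stay within the supported range,
                // EXPO_MIN_SCALE (-10) to EXPO_MAX_SCALE (20) inclusive.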
                if *max_scale > EXPO_MAX_SCALE {
                    return Err(MetricsError::Config(format!(
                        "aggregation: exponential histogram: max scale ({}) is greater than 20",
                        max_scale,
                    )));
                }
                if *max_scale < EXPO_MIN_SCALE {
                    return Err(MetricsError::Config(format!(
                        "aggregation: exponential histogram: max scale ({}) is less than -10",
                        max_scale,
                    )));
                }

                Ok(())
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::metrics::{
        internal::{EXPO_MAX_SCALE, EXPO_MIN_SCALE},
        Aggregation,
    };
    use opentelemetry::metrics::{MetricsError, Result};

    #[test]
    fn validate_aggregation() {
        struct TestCase {
            name: &'static str,
            input: Aggregation,
            check: Box<dyn Fn(Result<()>) -> bool>,
        }
        let ok = Box::new(|result: Result<()>| result.is_ok());
        let config_error = Box::new(|result| matches!(result, Err(MetricsError::Config(_))));

        let test_cases: Vec<TestCase> = vec![
            TestCase {
                name: "base2 histogram with maximum max_scale",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MAX_SCALE,
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "base2 histogram with minimum max_scale",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MIN_SCALE,
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "base2 histogram with max_scale too small",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MIN_SCALE - 1,
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
            TestCase {
                name: "base2 histogram with max_scale too big",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MAX_SCALE + 1,
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
            TestCase {
                name: "explicit histogram with one boundary",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![0.0],
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "explicit histogram with monotonic boundaries",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![0.0, 2.0, 4.0, 8.0],
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "explicit histogram with non-monotonic boundaries",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![2.0, 0.0, 4.0, 8.0],
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
        ];
        for test in test_cases {
            assert!((test.check)(test.input.validate()), "{}", test.name)
        }
    }
}