Commit eedc07e

Merge branch 'main' into cijothomas/periodicreader-defaults

2 parents bc8d97f + d67d1fc

9 files changed: +155 -185 lines changed

opentelemetry-proto/src/transform/metrics.rs (+4 -4)

@@ -295,8 +295,8 @@ pub mod tonic {
                 .iter()
                 .map(|dp| TonicNumberDataPoint {
                     attributes: dp.attributes.iter().map(Into::into).collect(),
-                    start_time_unix_nano: to_nanos(dp.start_time),
-                    time_unix_nano: to_nanos(dp.time),
+                    start_time_unix_nano: to_nanos(sum.start_time),
+                    time_unix_nano: to_nanos(sum.time),
                     exemplars: dp.exemplars.iter().map(Into::into).collect(),
                     flags: TonicDataPointFlags::default() as u32,
                     value: Some(dp.value.into()),
@@ -319,8 +319,8 @@ pub mod tonic {
                 .iter()
                 .map(|dp| TonicNumberDataPoint {
                     attributes: dp.attributes.iter().map(Into::into).collect(),
-                    start_time_unix_nano: dp.start_time.map(to_nanos).unwrap_or_default(),
-                    time_unix_nano: to_nanos(dp.time),
+                    start_time_unix_nano: gauge.start_time.map(to_nanos).unwrap_or_default(),
+                    time_unix_nano: to_nanos(gauge.time),
                     exemplars: dp.exemplars.iter().map(Into::into).collect(),
                     flags: TonicDataPointFlags::default() as u32,
                     value: Some(dp.value.into()),
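Note on the hunks above: the tonic transform now converts the timestamps once from the enclosing aggregation (the captured `sum` / `gauge`) and stamps them onto every exported data point, since the SDK data points no longer carry their own timestamps. A minimal, self-contained sketch of that mapping, using simplified stand-in types rather than the real SDK/proto structs:

use std::time::{SystemTime, UNIX_EPOCH};

// Simplified stand-ins for illustration only; the real types live in
// opentelemetry-sdk and opentelemetry-proto.
struct SumDataPoint { value: u64 }
struct Sum { data_points: Vec<SumDataPoint>, start_time: SystemTime, time: SystemTime }
struct NumberDataPoint { start_time_unix_nano: u64, time_unix_nano: u64, value: u64 }

fn to_nanos(t: SystemTime) -> u64 {
    t.duration_since(UNIX_EPOCH).map(|d| d.as_nanos() as u64).unwrap_or_default()
}

// Every exported point shares the aggregation-level (start_time, time) pair.
fn transform(sum: &Sum) -> Vec<NumberDataPoint> {
    sum.data_points
        .iter()
        .map(|dp| NumberDataPoint {
            start_time_unix_nano: to_nanos(sum.start_time),
            time_unix_nano: to_nanos(sum.time),
            value: dp.value,
        })
        .collect()
}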

opentelemetry-sdk/src/metrics/data/mod.rs (+8 -14)

@@ -59,10 +59,6 @@ pub struct GaugeDataPoint<T> {
     /// Attributes is the set of key value pairs that uniquely identify the
     /// time series.
     pub attributes: Vec<KeyValue>,
-    /// The time when the time series was started.
-    pub start_time: Option<SystemTime>,
-    /// The time when the time series was recorded.
-    pub time: SystemTime,
     /// The value of this data point.
     pub value: T,
     /// The sampled [Exemplar]s collected during the time series.
@@ -73,8 +69,6 @@ impl<T: Copy> Clone for GaugeDataPoint<T> {
     fn clone(&self) -> Self {
         Self {
             attributes: self.attributes.clone(),
-            start_time: self.start_time,
-            time: self.time,
             value: self.value,
             exemplars: self.exemplars.clone(),
         }
@@ -86,6 +80,10 @@ impl<T: Copy> Clone for GaugeDataPoint<T> {
 pub struct Gauge<T> {
     /// Represents individual aggregated measurements with unique attributes.
     pub data_points: Vec<GaugeDataPoint<T>>,
+    /// The time when the time series was started.
+    pub start_time: Option<SystemTime>,
+    /// The time when the time series was recorded.
+    pub time: SystemTime,
 }

 impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for Gauge<T> {
@@ -103,10 +101,6 @@ pub struct SumDataPoint<T> {
     /// Attributes is the set of key value pairs that uniquely identify the
     /// time series.
     pub attributes: Vec<KeyValue>,
-    /// The time when the time series was started.
-    pub start_time: SystemTime,
-    /// The time when the time series was recorded.
-    pub time: SystemTime,
     /// The value of this data point.
     pub value: T,
     /// The sampled [Exemplar]s collected during the time series.
@@ -117,8 +111,6 @@ impl<T: Copy> Clone for SumDataPoint<T> {
     fn clone(&self) -> Self {
         Self {
             attributes: self.attributes.clone(),
-            start_time: self.start_time,
-            time: self.time,
             value: self.value,
             exemplars: self.exemplars.clone(),
         }
@@ -130,6 +122,10 @@ impl<T: Copy> Clone for SumDataPoint<T> {
 pub struct Sum<T> {
     /// Represents individual aggregated measurements with unique attributes.
     pub data_points: Vec<SumDataPoint<T>>,
+    /// The time when the time series was started.
+    pub start_time: SystemTime,
+    /// The time when the time series was recorded.
+    pub time: SystemTime,
     /// Describes if the aggregation is reported as the change from the last report
     /// time, or the cumulative changes since a fixed start time.
     pub temporality: Temporality,
@@ -366,8 +362,6 @@ mod tests {
     fn validate_cloning_data_points() {
         let data_type = SumDataPoint {
             attributes: vec![KeyValue::new("key", "value")],
-            start_time: std::time::SystemTime::now(),
-            time: std::time::SystemTime::now(),
             value: 0u32,
             exemplars: vec![Exemplar {
                 filtered_attributes: vec![],
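For orientation, after this change a caller builds the shared timestamps on the aggregation itself rather than on each point. A small sketch under the assumption that the `opentelemetry_sdk::metrics::data` types shown in this diff are publicly constructible (field names taken directly from the diff):

use std::time::SystemTime;
use opentelemetry::KeyValue;
use opentelemetry_sdk::metrics::data::{Sum, SumDataPoint};
use opentelemetry_sdk::metrics::Temporality;

fn main() {
    // Timestamps now live on the aggregation, so every data point in the
    // collection shares one (start_time, time) pair.
    let sum = Sum {
        data_points: vec![SumDataPoint {
            attributes: vec![KeyValue::new("key", "value")],
            value: 1u64,
            exemplars: vec![],
        }],
        start_time: SystemTime::now(),
        time: SystemTime::now(),
        temporality: Temporality::Cumulative,
    };
    println!("points: {}", sum.data_points.len());
}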

opentelemetry-sdk/src/metrics/internal/aggregate.rs (+15 -38)

@@ -2,10 +2,7 @@ use std::{marker, sync::Arc};

 use opentelemetry::KeyValue;

-use crate::metrics::{
-    data::{Aggregation, Gauge},
-    Temporality,
-};
+use crate::metrics::{data::Aggregation, Temporality};

 use super::{
     exponential_histogram::ExpoHistogram, histogram::Histogram, last_value::LastValue,
@@ -99,31 +96,15 @@ impl<T: Number> AggregateBuilder<T> {

     /// Builds a last-value aggregate function input and output.
     pub(crate) fn last_value(&self) -> (impl Measure<T>, impl ComputeAggregation) {
-        let lv_filter = Arc::new(LastValue::new());
-        let lv_agg = Arc::clone(&lv_filter);
+        let lv = Arc::new(LastValue::new());
+        let agg_lv = Arc::clone(&lv);
         let t = self.temporality;

         (
-            self.filter(move |n, a: &[KeyValue]| lv_filter.measure(n, a)),
-            move |dest: Option<&mut dyn Aggregation>| {
-                let g = dest.and_then(|d| d.as_mut().downcast_mut::<Gauge<T>>());
-                let mut new_agg = if g.is_none() {
-                    Some(Gauge {
-                        data_points: vec![],
-                    })
-                } else {
-                    None
-                };
-                let g = g.unwrap_or_else(|| new_agg.as_mut().expect("present if g is none"));
-
-                match t {
-                    Some(Temporality::Delta) => {
-                        lv_agg.compute_aggregation_delta(&mut g.data_points)
-                    }
-                    _ => lv_agg.compute_aggregation_cumulative(&mut g.data_points),
-                }
-
-                (g.data_points.len(), new_agg.map(|a| Box::new(a) as Box<_>))
+            self.filter(move |n, a: &[KeyValue]| lv.measure(n, a)),
+            move |dest: Option<&mut dyn Aggregation>| match t {
+                Some(Temporality::Delta) => agg_lv.delta(dest),
+                _ => agg_lv.cumulative(dest),
             },
         )
     }
@@ -211,8 +192,8 @@ impl<T: Number> AggregateBuilder<T> {
 #[cfg(test)]
 mod tests {
     use crate::metrics::data::{
-        ExponentialBucket, ExponentialHistogram, ExponentialHistogramDataPoint, GaugeDataPoint,
-        Histogram, HistogramDataPoint, Sum, SumDataPoint,
+        ExponentialBucket, ExponentialHistogram, ExponentialHistogramDataPoint, Gauge,
+        GaugeDataPoint, Histogram, HistogramDataPoint, Sum, SumDataPoint,
     };
     use std::{time::SystemTime, vec};

@@ -224,11 +205,11 @@ mod tests {
         let mut a = Gauge {
             data_points: vec![GaugeDataPoint {
                 attributes: vec![KeyValue::new("a", 1)],
-                start_time: Some(SystemTime::now()),
-                time: SystemTime::now(),
                 value: 1u64,
                 exemplars: vec![],
             }],
+            start_time: Some(SystemTime::now()),
+            time: SystemTime::now(),
         };
         let new_attributes = [KeyValue::new("b", 2)];
         measure.call(2, &new_attributes[..]);
@@ -251,19 +232,17 @@ mod tests {
             data_points: vec![
                 SumDataPoint {
                     attributes: vec![KeyValue::new("a1", 1)],
-                    start_time: SystemTime::now(),
-                    time: SystemTime::now(),
                     value: 1u64,
                     exemplars: vec![],
                 },
                 SumDataPoint {
                     attributes: vec![KeyValue::new("a2", 1)],
-                    start_time: SystemTime::now(),
-                    time: SystemTime::now(),
                     value: 2u64,
                     exemplars: vec![],
                 },
             ],
+            start_time: SystemTime::now(),
+            time: SystemTime::now(),
             temporality: if temporality == Temporality::Delta {
                 Temporality::Cumulative
             } else {
@@ -294,19 +273,17 @@ mod tests {
             data_points: vec![
                 SumDataPoint {
                     attributes: vec![KeyValue::new("a1", 1)],
-                    start_time: SystemTime::now(),
-                    time: SystemTime::now(),
                     value: 1u64,
                     exemplars: vec![],
                 },
                 SumDataPoint {
                     attributes: vec![KeyValue::new("a2", 1)],
-                    start_time: SystemTime::now(),
-                    time: SystemTime::now(),
                     value: 2u64,
                     exemplars: vec![],
                 },
             ],
+            start_time: SystemTime::now(),
+            time: SystemTime::now(),
             temporality: if temporality == Temporality::Delta {
                 Temporality::Cumulative
             } else {
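The closure removed in `last_value` above is the downcast-or-create boilerplate that is assumed to have moved behind the new `LastValue::delta` / `LastValue::cumulative` calls: reuse the caller-supplied destination when it is already a `Gauge<T>`, otherwise allocate a fresh one, fill its data points, and report the point count plus any newly created aggregation. A self-contained, simplified sketch of that pattern (local types, not the SDK's):

use std::any::Any;

struct Gauge<T> {
    data_points: Vec<T>,
}

// Mirrors the shape of the deleted closure body: downcast, create on
// mismatch, fill, then return (len, newly created aggregation if any).
fn compute<T: 'static>(
    dest: Option<&mut dyn Any>,
    fill: impl FnOnce(&mut Vec<T>),
) -> (usize, Option<Box<Gauge<T>>>) {
    let g = dest.and_then(|d| d.downcast_mut::<Gauge<T>>());
    let mut new_agg = if g.is_none() {
        Some(Gauge { data_points: vec![] })
    } else {
        None
    };
    let g = g.unwrap_or_else(|| new_agg.as_mut().expect("present if g is none"));
    fill(&mut g.data_points);
    (g.data_points.len(), new_agg.map(Box::new))
}

With that logic folded into the aggregator, the `ComputeAggregation` closure kept in the diff is just a two-arm dispatch on temporality.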

opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs (+4 -64)

@@ -1440,15 +1440,14 @@ mod tests {
                 count = out_fn.call(Some(got.as_mut())).0
             }

-            assert_aggregation_eq::<T>(Box::new(test.want), got, true, test.name);
+            assert_aggregation_eq::<T>(Box::new(test.want), got, test.name);
             assert_eq!(test.want_count, count, "{}", test.name);
         }
     }

     fn assert_aggregation_eq<T: Number + PartialEq>(
         a: Box<dyn Aggregation>,
         b: Box<dyn Aggregation>,
-        ignore_timestamp: bool,
         test_name: &'static str,
     ) {
         assert_eq!(
@@ -1467,13 +1466,7 @@ mod tests {
                 test_name
             );
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
-                assert_gauge_data_points_eq(
-                    a,
-                    b,
-                    ignore_timestamp,
-                    "mismatching gauge data points",
-                    test_name,
-                );
+                assert_gauge_data_points_eq(a, b, "mismatching gauge data points", test_name);
             }
         } else if let Some(a) = a.as_any().downcast_ref::<data::Sum<T>>() {
             let b = b.as_any().downcast_ref::<data::Sum<T>>().unwrap();
@@ -1494,13 +1487,7 @@ mod tests {
                 test_name
             );
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
-                assert_sum_data_points_eq(
-                    a,
-                    b,
-                    ignore_timestamp,
-                    "mismatching sum data points",
-                    test_name,
-                );
+                assert_sum_data_points_eq(a, b, "mismatching sum data points", test_name);
             }
         } else if let Some(a) = a.as_any().downcast_ref::<data::Histogram<T>>() {
             let b = b.as_any().downcast_ref::<data::Histogram<T>>().unwrap();
@@ -1516,13 +1503,7 @@ mod tests {
                 test_name
             );
             for (a, b) in a.data_points.iter().zip(b.data_points.iter()) {
-                assert_hist_data_points_eq(
-                    a,
-                    b,
-                    ignore_timestamp,
-                    "mismatching hist data points",
-                    test_name,
-                );
+                assert_hist_data_points_eq(a, b, "mismatching hist data points", test_name);
             }
         } else if let Some(a) = a.as_any().downcast_ref::<data::ExponentialHistogram<T>>() {
             let b = b
@@ -1544,7 +1525,6 @@ mod tests {
                 assert_exponential_hist_data_points_eq(
                     a,
                     b,
-                    ignore_timestamp,
                     "mismatching hist data points",
                     test_name,
                 );
@@ -1557,7 +1537,6 @@ mod tests {
     fn assert_sum_data_points_eq<T: Number>(
         a: &data::SumDataPoint<T>,
         b: &data::SumDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1567,21 +1546,11 @@ mod tests {
             test_name, message
         );
         assert_eq!(a.value, b.value, "{}: {} value", test_name, message);
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }

     fn assert_gauge_data_points_eq<T: Number>(
         a: &data::GaugeDataPoint<T>,
         b: &data::GaugeDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1591,21 +1560,11 @@ mod tests {
             test_name, message
         );
         assert_eq!(a.value, b.value, "{}: {} value", test_name, message);
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }

     fn assert_hist_data_points_eq<T: Number>(
         a: &data::HistogramDataPoint<T>,
         b: &data::HistogramDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1624,21 +1583,11 @@ mod tests {
         assert_eq!(a.min, b.min, "{}: {} min", test_name, message);
         assert_eq!(a.max, b.max, "{}: {} max", test_name, message);
         assert_eq!(a.sum, b.sum, "{}: {} sum", test_name, message);
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }

     fn assert_exponential_hist_data_points_eq<T: Number>(
         a: &data::ExponentialHistogramDataPoint<T>,
         b: &data::ExponentialHistogramDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1669,14 +1618,5 @@ mod tests {
             "{}: {} neg",
             test_name, message
         );
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }
 }
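With the `ignore_timestamp` flag gone and timestamps removed from the data points, the per-point test helpers reduce to comparing attributes and values. A simplified, self-contained illustration of what such a helper boils down to (local stand-in type, not the SDK's; the real helpers take `data::SumDataPoint<T>` and friends as shown in the hunks above):

use std::fmt::Debug;

#[derive(Debug, PartialEq)]
struct SumDataPoint<T> {
    attributes: Vec<(&'static str, i64)>,
    value: T,
}

fn assert_sum_data_points_eq<T: PartialEq + Debug>(
    a: &SumDataPoint<T>,
    b: &SumDataPoint<T>,
    message: &'static str,
    test_name: &'static str,
) {
    // No timestamp comparison any more: only identity (attributes) and value.
    assert_eq!(
        a.attributes, b.attributes,
        "{}: {} attributes",
        test_name, message
    );
    assert_eq!(a.value, b.value, "{}: {} value", test_name, message);
}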
