
Commit 1f05dcf

add method to calculate data point size

1 parent fd858f6 commit 1f05dcf

File tree

  • opentelemetry-sdk/src/metrics/internal/sum.rs

1 file changed: +29 -27 lines changed


opentelemetry-sdk/src/metrics/internal/sum.rs (+29 -27)
@@ -1,4 +1,4 @@
-use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
+use std::sync::atomic::{AtomicBool, Ordering};
 use std::{
     collections::{hash_map::Entry, HashMap},
     sync::{Arc, Mutex},
@@ -21,7 +21,6 @@ struct ValueMap<T: Number<T>> {
     buckets: Arc<[Mutex<Option<HashMap<AttributeSet, T>>>; 256]>,
     has_no_value_attribute_value: AtomicBool,
     no_attribute_value: T::AtomicTracker,
-    total_count: AtomicUsize,
 }

 impl<T: Number<T>> Default for ValueMap<T> {
@@ -42,7 +41,6 @@ impl<T: Number<T>> ValueMap<T> {
             buckets: Arc::new(buckets),
             has_no_value_attribute_value: AtomicBool::new(false),
             no_attribute_value: T::new_atomic_tracker(),
-            total_count: AtomicUsize::new(0),
         }
     }

@@ -53,6 +51,17 @@ impl<T: Number<T>> ValueMap<T> {
         // Use the 8 least significant bits directly, avoiding the modulus operation.
         hasher.finish() as u8
     }
+
+    // Calculate the total length of data points across all buckets.
+    fn total_data_points_count(&self) -> usize {
+        self.buckets
+            .iter()
+            .map(|bucket_mutex| {
+                let locked_bucket = bucket_mutex.lock().unwrap();
+                locked_bucket.as_ref().map_or(0, |bucket| bucket.len())
+            })
+            .sum::<usize>()
+    }
 }

 impl<T: Number<T>> ValueMap<T> {
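
Aside (not part of the commit): a minimal standalone sketch of the same counting pattern introduced above, assuming simplified stand-in types (String keys, u64 values, a 4-bucket array) in place of the SDK's AttributeSet, Number<T>, and 256-bucket array.

use std::collections::HashMap;
use std::sync::Mutex;

// Simplified stand-in for ValueMap's bucket array.
struct Buckets {
    buckets: [Mutex<Option<HashMap<String, u64>>>; 4],
}

impl Buckets {
    // Same shape as total_data_points_count(): lock each bucket,
    // treat an unpopulated bucket (None) as zero entries, and sum.
    fn total_data_points_count(&self) -> usize {
        self.buckets
            .iter()
            .map(|bucket_mutex| {
                let locked_bucket = bucket_mutex.lock().unwrap();
                locked_bucket.as_ref().map_or(0, |bucket| bucket.len())
            })
            .sum::<usize>()
    }
}

fn main() {
    let b = Buckets {
        buckets: [
            Mutex::new(Some(HashMap::from([("a".to_string(), 1u64)]))),
            Mutex::new(None),
            Mutex::new(Some(HashMap::from([
                ("b".to_string(), 2),
                ("c".to_string(), 3),
            ]))),
            Mutex::new(None),
        ],
    };
    assert_eq!(b.total_data_points_count(), 3);
}

Deriving the count on demand from the maps themselves means it cannot drift from the buckets' actual contents, which sidesteps the "TBD - Update total_count ??" question left in the over-limit branch below.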
@@ -80,7 +89,6 @@ impl<T: Number<T>> ValueMap<T> {
             Entry::Vacant(vacant_entry) => {
                 if is_under_cardinality_limit(size) {
                     vacant_entry.insert(measurement);
-                    self.total_count.fetch_add(1, Ordering::SeqCst);
                 } else {
                     // TBD - Update total_count ??
                     values
@@ -142,15 +150,11 @@ impl<T: Number<T>> Sum<T> {
         s_data.is_monotonic = self.monotonic;
         s_data.data_points.clear();

-        let total_len = self.value_map.total_count.load(Ordering::SeqCst) + 1;
+        let total_len: usize = self.value_map.total_data_points_count() + 1;
         if total_len > s_data.data_points.capacity() {
-            s_data
-                .data_points
-                .reserve_exact(total_len - s_data.data_points.capacity());
-        };
-        s_data
-            .data_points
-            .reserve_exact(self.value_map.total_count.load(Ordering::SeqCst));
+            let additional_space_needed = total_len - s_data.data_points.capacity();
+            s_data.data_points.reserve_exact(additional_space_needed);
+        }

         let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);
         if self
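
Aside (not part of the commit): a minimal sketch of the reservation pattern this hunk and the three below it converge on, assuming a plain Vec<u64> in place of s_data.data_points.

// Grow capacity only when needed, and only by the exact shortfall.
fn reserve_for(points: &mut Vec<u64>, total_len: usize) {
    if total_len > points.capacity() {
        let additional_space_needed = total_len - points.capacity();
        points.reserve_exact(additional_space_needed);
    }
}

fn main() {
    let mut points: Vec<u64> = Vec::with_capacity(2);
    reserve_for(&mut points, 5);
    assert!(points.capacity() >= 5);

    // A smaller target is a no-op; capacity is never shrunk.
    reserve_for(&mut points, 3);
    assert!(points.capacity() >= 5);
}

In the Sum delta hunk above, the old code also issued a second, unconditional reserve_exact based on total_count; the rewritten form reserves at most once, only for the shortfall.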
@@ -214,12 +218,11 @@ impl<T: Number<T>> Sum<T> {
         s_data.is_monotonic = self.monotonic;
         s_data.data_points.clear();

-        let total_len = self.value_map.total_count.load(Ordering::SeqCst) + 1;
+        let total_len: usize = self.value_map.total_data_points_count() + 1;
         if total_len > s_data.data_points.capacity() {
-            s_data
-                .data_points
-                .reserve_exact(total_len - s_data.data_points.capacity());
-        };
+            let additional_space_needed = total_len - s_data.data_points.capacity();
+            s_data.data_points.reserve_exact(additional_space_needed);
+        }

         let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);

@@ -306,12 +309,11 @@ impl<T: Number<T>> PrecomputedSum<T> {
         s_data.temporality = Temporality::Delta;
         s_data.is_monotonic = self.monotonic;

-        let total_len = self.value_map.total_count.load(Ordering::SeqCst) + 1;
+        let total_len: usize = self.value_map.total_data_points_count() + 1;
         if total_len > s_data.data_points.capacity() {
-            s_data
-                .data_points
-                .reserve_exact(total_len - s_data.data_points.capacity());
-        };
+            let additional_space_needed = total_len - s_data.data_points.capacity();
+            s_data.data_points.reserve_exact(additional_space_needed);
+        }

         let mut new_reported = HashMap::with_capacity(total_len);
         let mut reported = match self.reported.lock() {
@@ -387,12 +389,12 @@ impl<T: Number<T>> PrecomputedSum<T> {
         s_data.data_points.clear();
         s_data.temporality = Temporality::Cumulative;
         s_data.is_monotonic = self.monotonic;
-        let total_len = self.value_map.total_count.load(Ordering::SeqCst) + 1;
+
+        let total_len: usize = self.value_map.total_data_points_count() + 1;
         if total_len > s_data.data_points.capacity() {
-            s_data
-                .data_points
-                .reserve_exact(total_len - s_data.data_points.capacity());
-        };
+            let additional_space_needed = total_len - s_data.data_points.capacity();
+            s_data.data_points.reserve_exact(additional_space_needed);
+        }

         let mut new_reported = HashMap::with_capacity(total_len);
         let mut reported = match self.reported.lock() {
