Fix metrics sorted/deduped bug AND extract common functionality for better code usage/consistency. #2097

Closed
@@ -112,6 +112,12 @@
"value": {
"intValue": "100"
}
},
{
"key": "number/int",
"value": {
"intValue": "100"
}
}
],
"droppedAttributesCount": 0
1 change: 1 addition & 0 deletions opentelemetry-sdk/Cargo.toml
@@ -20,6 +20,7 @@ futures-util = { workspace = true, features = ["std", "sink", "async-await-macro
once_cell = { workspace = true }
percent-encoding = { version = "2.0", optional = true }
rand = { workspace = true, features = ["std", "std_rng","small_rng"], optional = true }
rustc-hash = "2.0"
glob = { version = "0.3.1", optional =true}
serde = { workspace = true, features = ["derive", "rc"], optional = true }
serde_json = { workspace = true, optional = true }
63 changes: 63 additions & 0 deletions opentelemetry-sdk/src/metrics/attribute_set.rs
@@ -0,0 +1,63 @@
use std::hash::{Hash, Hasher};

use opentelemetry::{Key, KeyValue, Value};
use rustc_hash::FxHasher;

/// A unique set of attributes that can be used as instrument identifiers.
///
/// This must implement [Hash], [PartialEq], and [Eq] so it may be used
/// as a HashMap key and in other de-duplication methods.
#[derive(Clone, Default, Debug, PartialEq, Eq)]
pub(crate) struct AttributeSet(Vec<KeyValue>, u64);

impl From<&[KeyValue]> for AttributeSet {
fn from(values: &[KeyValue]) -> Self {
let mut vec = Vec::from_iter(values.into_iter().cloned());
vec.sort_by(|a, b| a.key.cmp(&b.key));

// We cannot use vec.dedup_by because it removes the last duplicate, not the first
if vec.len() > 1 {
let mut i = vec.len() - 1;
while i != 0 {
let is_same = unsafe { vec.get_unchecked(i - 1).key == vec.get_unchecked(i).key };
if is_same {
vec.remove(i - 1);
}
i -= 1;
}
}

let hash = calculate_hash(&vec);
AttributeSet(vec, hash)
}
}

fn calculate_hash(values: &[KeyValue]) -> u64 {
let mut hasher = FxHasher::default();
values.iter().fold(&mut hasher, |mut hasher, item| {
item.hash(&mut hasher);
hasher
});
hasher.finish()
}

impl AttributeSet {
/// Iterate over key value pairs in the set
pub(crate) fn iter(&self) -> impl Iterator<Item = (&Key, &Value)> {
self.0.iter().map(|kv| (&kv.key, &kv.value))
}

pub(crate) fn into_inner(self) -> Vec<KeyValue> {
self.0
}

pub(crate) fn as_ref(&self) -> &Vec<KeyValue> {
&self.0
}
}

impl Hash for AttributeSet {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u64(self.1)
}
}
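
As a side note for reviewers, here is a minimal, self-contained sketch of the pattern `AttributeSet::from` implements above: a stable sort by key, "last duplicate wins" de-duplication, and a precomputed FxHasher value. The `dedup_last_wins` function and the demo values are hypothetical illustrations, not part of this PR; only the `opentelemetry` and `rustc-hash` crates already used here are assumed.

```rust
use std::hash::{Hash, Hasher};

use opentelemetry::KeyValue;
use rustc_hash::FxHasher;

fn dedup_last_wins(values: &[KeyValue]) -> (Vec<KeyValue>, u64) {
    let mut vec: Vec<KeyValue> = values.to_vec();
    // Stable sort keeps input order among equal keys, so the later entry survives below.
    vec.sort_by(|a, b| a.key.cmp(&b.key));
    let mut i = vec.len();
    while i > 1 {
        i -= 1;
        if vec[i - 1].key == vec[i].key {
            vec.remove(i - 1); // drop the earlier duplicate, keep the last one
        }
    }
    // Hash the sorted, de-duplicated attributes once, up front.
    let mut hasher = FxHasher::default();
    for kv in &vec {
        kv.hash(&mut hasher);
    }
    (vec, hasher.finish())
}

fn main() {
    let (attrs, hash) = dedup_last_wins(&[
        KeyValue::new("b", 2_i64),
        KeyValue::new("a", 1_i64),
        KeyValue::new("a", 99_i64), // the later duplicate wins
    ]);
    assert_eq!(attrs.len(), 2);
    assert_eq!(attrs[0].key.as_str(), "a");
    println!("attrs = {attrs:?}, hash = {hash}");
}
```

Caching the hash at construction keeps later map operations cheap, since `Hash for AttributeSet` above only writes the stored `u64`.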
177 changes: 49 additions & 128 deletions opentelemetry-sdk/src/metrics/internal/histogram.rs
@@ -1,13 +1,11 @@
use std::collections::HashSet;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::mem::take;
use std::{sync::Mutex, time::SystemTime};

use crate::metrics::data::HistogramDataPoint;
use crate::metrics::data::{self, Aggregation, Temporality};
use opentelemetry::KeyValue;

use super::Number;
use super::{collect_data_points_readonly, collect_data_points_reset, Number};
use super::{AtomicTracker, AtomicallyUpdate, Operation, ValueMap};

struct HistogramUpdate;
@@ -45,7 +43,6 @@
}
}

#[derive(Default)]
struct Buckets<T> {
counts: Vec<u64>,
count: u64,
@@ -61,7 +58,8 @@
counts: vec![0; n],
min: T::max(),
max: T::min(),
..Default::default()
count: 0,
total: T::default(),
}
}

@@ -80,14 +78,17 @@
}
}

fn reset(&mut self) {
for item in &mut self.counts {
*item = 0;
}
self.count = Default::default();
self.total = Default::default();
self.min = T::max();
self.max = T::min();
fn clone_and_reset(&mut self) -> Self {
let n = self.counts.len();
let res = Buckets {
counts: take(&mut self.counts),
count: self.count,
total: self.total,
min: self.min,
max: self.max,
};
*self = Buckets::new(n);
res
}
}
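
For context, the swap performed by `clone_and_reset` can be shown in isolation; the `Counts`/`snapshot_and_reset` names below are hypothetical stand-ins, not PR code. `mem::take` moves the bucket-counts allocation out for the exported snapshot instead of cloning it, and a freshly zeroed vector is put back for the next interval.

```rust
use std::mem::take;

struct Counts {
    counts: Vec<u64>,
    total: u64,
}

impl Counts {
    fn new(n: usize) -> Self {
        Counts { counts: vec![0; n], total: 0 }
    }

    fn snapshot_and_reset(&mut self) -> Self {
        let n = self.counts.len();
        let snapshot = Counts {
            // Move the allocation out for the exported data point instead of cloning it.
            counts: take(&mut self.counts),
            total: self.total,
        };
        // Put a freshly zeroed bucket vector back for the next collection interval.
        *self = Counts::new(n);
        snapshot
    }
}

fn main() {
    let mut c = Counts::new(4);
    c.counts[2] += 1;
    c.total += 1;
    let snap = c.snapshot_and_reset();
    assert_eq!(snap.counts, vec![0, 0, 1, 0]);
    assert_eq!(c.counts, vec![0, 0, 0, 0]);
    assert_eq!(c.total, 0);
}
```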

@@ -155,26 +156,27 @@
h.temporality = Temporality::Delta;
h.data_points.clear();

// Max number of data points need to account for the special casing
// of the no attribute value + overflow attribute.
let n = self.value_map.count.load(Ordering::SeqCst) + 2;
if n > h.data_points.capacity() {
h.data_points.reserve_exact(n - h.data_points.capacity());
}
let Ok(mut trackers) = self.value_map.trackers.write() else {
return (0, None);
};

if self
.value_map
.has_no_attribute_value
.swap(false, Ordering::AcqRel)
{
if let Ok(ref mut b) = self.value_map.no_attribute_tracker.buckets.lock() {
h.data_points.push(HistogramDataPoint {
attributes: vec![],
collect_data_points_reset(
&self.value_map.no_attribs_tracker,
&mut trackers,
&mut h.data_points,
|attributes, tracker| {
let b = tracker
.buckets
.lock()
.unwrap_or_else(|err| err.into_inner())
.clone_and_reset();
HistogramDataPoint {
attributes,
start_time: start,
time: t,
count: b.count,
bounds: self.bounds.clone(),
bucket_counts: b.counts.clone(),
bucket_counts: b.counts,
sum: if self.record_sum {
b.total
} else {
Expand All @@ -191,54 +193,14 @@
None
},
exemplars: vec![],
});

b.reset();
}
}

let mut trackers = match self.value_map.trackers.write() {
Ok(v) => v,
Err(_) => return (0, None),
};

let mut seen = HashSet::new();
for (attrs, tracker) in trackers.drain() {
if seen.insert(Arc::as_ptr(&tracker)) {
if let Ok(b) = tracker.buckets.lock() {
h.data_points.push(HistogramDataPoint {
attributes: attrs.clone(),
start_time: start,
time: t,
count: b.count,
bounds: self.bounds.clone(),
bucket_counts: b.counts.clone(),
sum: if self.record_sum {
b.total
} else {
T::default()
},
min: if self.record_min_max {
Some(b.min)
} else {
None
},
max: if self.record_min_max {
Some(b.max)
} else {
None
},
exemplars: vec![],
});
}
}
}
},
);

// The delta collection cycle resets.
if let Ok(mut start) = self.start.lock() {
*start = t;
}
self.value_map.count.store(0, Ordering::SeqCst);

(h.data_points.len(), new_agg.map(|a| Box::new(a) as Box<_>))
}
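
The drain-and-dedup behaviour that the removed inline loop performed above (and that `collect_data_points_reset` presumably centralizes; its definition lives outside this diff) boils down to the following runnable sketch. `Tracker`, `drain_into_points`, and the string keys are hypothetical stand-ins for the SDK's real types.

```rust
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

struct Tracker {
    value: u64,
}

fn drain_into_points<DP>(
    trackers: &mut HashMap<&'static str, Arc<Tracker>>,
    dest: &mut Vec<DP>,
    mut map_fn: impl FnMut(&'static str, &Tracker) -> DP,
) {
    // A tracker registered under several attribute orderings must be exported only once.
    let mut seen = HashSet::new();
    for (attrs, tracker) in trackers.drain() {
        if seen.insert(Arc::as_ptr(&tracker)) {
            dest.push(map_fn(attrs, &*tracker));
        }
    }
}

fn main() {
    let shared = Arc::new(Tracker { value: 7 });
    let mut trackers = HashMap::new();
    trackers.insert("k1=a,k2=b", Arc::clone(&shared));
    trackers.insert("k2=b,k1=a", shared); // same tracker under a second key ordering
    let mut points = Vec::new();
    drain_into_points(&mut trackers, &mut points, |attrs, t| (attrs, t.value));
    assert_eq!(points.len(), 1); // emitted exactly once
    assert!(trackers.is_empty()); // the next delta interval starts empty
}
```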
@@ -266,21 +228,21 @@
h.temporality = Temporality::Cumulative;
h.data_points.clear();

// Max number of data points need to account for the special casing
// of the no attribute value + overflow attribute.
let n = self.value_map.count.load(Ordering::SeqCst) + 2;
if n > h.data_points.capacity() {
h.data_points.reserve_exact(n - h.data_points.capacity());
}
let Ok(trackers) = self.value_map.trackers.read() else {
return (0, None);
};

if self
.value_map
.has_no_attribute_value
.load(Ordering::Acquire)
{
if let Ok(b) = &self.value_map.no_attribute_tracker.buckets.lock() {
h.data_points.push(HistogramDataPoint {
attributes: vec![],
collect_data_points_readonly(
&self.value_map.no_attribs_tracker,
&trackers,
&mut h.data_points,
|attributes, tracker| {
let b = tracker
.buckets
.lock()
.unwrap_or_else(|err| err.into_inner());
HistogramDataPoint {
attributes,
start_time: start,
time: t,
count: b.count,
Expand All @@ -302,50 +264,9 @@
None
},
exemplars: vec![],
});
}
}

let trackers = match self.value_map.trackers.write() {
Ok(v) => v,
Err(_) => return (0, None),
};

// TODO: This will use an unbounded amount of memory if there
// are unbounded number of attribute sets being aggregated. Attribute
// sets that become "stale" need to be forgotten so this will not
// overload the system.
let mut seen = HashSet::new();
for (attrs, tracker) in trackers.iter() {
if seen.insert(Arc::as_ptr(tracker)) {
if let Ok(b) = tracker.buckets.lock() {
h.data_points.push(HistogramDataPoint {
attributes: attrs.clone(),
start_time: start,
time: t,
count: b.count,
bounds: self.bounds.clone(),
bucket_counts: b.counts.clone(),
sum: if self.record_sum {
b.total
} else {
T::default()
},
min: if self.record_min_max {
Some(b.min)
} else {
None
},
max: if self.record_min_max {
Some(b.max)
} else {
None
},
exemplars: vec![],
});
}
}
}
},
);

(h.data_points.len(), new_agg.map(|a| Box::new(a) as Box<_>))
}