diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/Cargo.toml b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/Cargo.toml
index d274704457..9662146c4c 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/Cargo.toml
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/Cargo.toml
@@ -16,6 +16,7 @@ edition = "2021"
 rust-version = "1.65"
 
 [dependencies]
+oak_core = { path = "../../../../oak_core" }
 opentelemetry_rk = { version = "0.23", path = "../opentelemetry/" }
 async-std = { version = "1.10", features = ["unstable"], optional = true }
 hashbrown = "*"
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/data/mod.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/data/mod.rs
index 0654ba836c..fe96309746 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/data/mod.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/data/mod.rs
@@ -3,7 +3,6 @@
 extern crate alloc;
 
 use alloc::borrow::Cow;
 use core::{any, fmt};
-use std::time::SystemTime;
 
 use opentelemetry_rk::KeyValue;
@@ -101,10 +100,6 @@ pub struct DataPoint<T> {
     /// Attributes is the set of key value pairs that uniquely identify the
     /// time series.
     pub attributes: Vec<KeyValue>,
-    /// The time when the time series was started.
-    pub start_time: Option<SystemTime>,
-    /// The time when the time series was recorded.
-    pub time: Option<SystemTime>,
     /// The value of this data point.
     pub value: T,
     /// The sampled [Exemplar]s collected during the time series.
@@ -115,8 +110,6 @@ impl<T: Copy> Clone for DataPoint<T> {
     fn clone(&self) -> Self {
         Self {
             attributes: self.attributes.clone(),
-            start_time: self.start_time,
-            time: self.time,
             value: self.value,
             exemplars: self.exemplars.clone(),
         }
@@ -147,10 +140,6 @@ impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for Histogram<T> {
 pub struct HistogramDataPoint<T> {
     /// The set of key value pairs that uniquely identify the time series.
     pub attributes: Vec<KeyValue>,
-    /// The time when the time series was started.
-    pub start_time: SystemTime,
-    /// The time when the time series was recorded.
-    pub time: SystemTime,
     /// The number of updates this histogram has been calculated with.
     pub count: u64,
 
@@ -176,8 +165,6 @@ impl<T: Copy> Clone for HistogramDataPoint<T> {
     fn clone(&self) -> Self {
         Self {
             attributes: self.attributes.clone(),
-            start_time: self.start_time,
-            time: self.time,
             count: self.count,
             bounds: self.bounds.clone(),
             bucket_counts: self.bucket_counts.clone(),
@@ -214,10 +201,6 @@ impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for ExponentialHistogram
 pub struct ExponentialHistogramDataPoint<T> {
     /// The set of key value pairs that uniquely identify the time series.
     pub attributes: Vec<KeyValue>,
-    /// When the time series was started.
-    pub start_time: SystemTime,
-    /// The time when the time series was recorded.
-    pub time: SystemTime,
     /// The number of updates this histogram has been calculated with.
     pub count: usize,
 
@@ -277,8 +260,6 @@ pub struct Exemplar<T> {
     /// The attributes recorded with the measurement but filtered out of the
     /// time series' aggregated data.
     pub filtered_attributes: Vec<KeyValue>,
-    /// The time when the measurement was recorded.
-    pub time: SystemTime,
     /// The measured value.
     pub value: T,
     /// The ID of the span that was active during the measurement.
@@ -295,7 +276,6 @@ impl<T: Copy> Clone for Exemplar<T> {
     fn clone(&self) -> Self {
         Self {
             filtered_attributes: self.filtered_attributes.clone(),
-            time: self.time,
             value: self.value,
             span_id: self.span_id,
             trace_id: self.trace_id,
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/aggregate.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/aggregate.rs
index f8d0305403..15b3e310d8 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/aggregate.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/aggregate.rs
@@ -220,7 +220,8 @@ mod tests {
         DataPoint, ExponentialBucket, ExponentialHistogram, ExponentialHistogramDataPoint,
         Histogram, HistogramDataPoint, Sum,
     };
-    use std::{time::SystemTime, vec};
+
+    use std::vec;
 
     use super::*;
 
@@ -230,8 +231,6 @@ mod tests {
         let mut a = Gauge {
             data_points: vec![DataPoint {
                 attributes: vec![KeyValue::new("a", 1)],
-                start_time: Some(SystemTime::now()),
-                time: Some(SystemTime::now()),
                 value: 1u64,
                 exemplars: vec![],
             }],
@@ -257,15 +256,11 @@ mod tests {
             data_points: vec![
                 DataPoint {
                     attributes: vec![KeyValue::new("a1", 1)],
-                    start_time: Some(SystemTime::now()),
-                    time: Some(SystemTime::now()),
                     value: 1u64,
                     exemplars: vec![],
                 },
                 DataPoint {
                     attributes: vec![KeyValue::new("a2", 1)],
-                    start_time: Some(SystemTime::now()),
-                    time: Some(SystemTime::now()),
                     value: 2u64,
                     exemplars: vec![],
                 },
@@ -300,15 +295,11 @@ mod tests {
             data_points: vec![
                 DataPoint {
                     attributes: vec![KeyValue::new("a1", 1)],
-                    start_time: Some(SystemTime::now()),
-                    time: Some(SystemTime::now()),
                     value: 1u64,
                     exemplars: vec![],
                 },
                 DataPoint {
                     attributes: vec![KeyValue::new("a2", 1)],
-                    start_time: Some(SystemTime::now()),
-                    time: Some(SystemTime::now()),
                     value: 2u64,
                     exemplars: vec![],
                 },
@@ -343,8 +334,6 @@ mod tests {
         let mut a = Histogram {
             data_points: vec![HistogramDataPoint {
                 attributes: vec![KeyValue::new("a1", 1)],
-                start_time: SystemTime::now(),
-                time: SystemTime::now(),
                 count: 2,
                 bounds: vec![1.0, 2.0],
                 bucket_counts: vec![0, 1, 1],
@@ -386,8 +375,6 @@ mod tests {
         let mut a = ExponentialHistogram {
             data_points: vec![ExponentialHistogramDataPoint {
                 attributes: vec![KeyValue::new("a1", 1)],
-                start_time: SystemTime::now(),
-                time: SystemTime::now(),
                 count: 2,
                 min: None,
                 max: None,
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
index 0acf32260a..1032e9fd6f 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/exponential_histogram.rs
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, f64::consts::LOG2_E, sync::Mutex, time::SystemTime};
+use std::{collections::HashMap, f64::consts::LOG2_E, sync::Mutex};
 
 use once_cell::sync::Lazy;
 use opentelemetry_rk::{metrics::MetricsError, KeyValue};
@@ -318,8 +318,6 @@ pub(crate) struct ExpoHistogram<T> {
     max_scale: i8,
 
     values: Mutex<HashMap<AttributeSet, ExpoHistogramDataPoint<T>>>,
-
-    start: Mutex<SystemTime>,
 }
 
 impl<T: Number<T>> ExpoHistogram<T> {
@@ -336,7 +334,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
             max_size: max_size as i32,
             max_scale,
             values: Mutex::new(HashMap::default()),
-            start: Mutex::new(SystemTime::now()),
         }
     }
 
@@ -364,12 +361,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
-        let start = self
-            .start
-            .lock()
-            .map(|s| *s)
-            .unwrap_or_else(|_| SystemTime::now());
 
         let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::ExponentialHistogram<T>>());
         let mut new_agg = if h.is_none() {
@@ -400,8 +391,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: start,
-                time: t,
                 count: b.count,
                 min: if self.record_min_max {
                     Some(b.min)
@@ -428,12 +417,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
                 exemplars: vec![],
             });
         }
-
-        // The delta collection cycle resets.
-        if let Ok(mut start) = self.start.lock() {
-            *start = t;
-        }
-
         (n, new_agg.map(|a| Box::new(a) as Box<_>))
     }
 
@@ -441,12 +424,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
-        let start = self
-            .start
-            .lock()
-            .map(|s| *s)
-            .unwrap_or_else(|_| SystemTime::now());
 
         let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::ExponentialHistogram<T>>());
         let mut new_agg = if h.is_none() {
@@ -481,8 +458,6 @@ impl<T: Number<T>> ExpoHistogram<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: start,
-                time: t,
                 count: b.count,
                 min: if self.record_min_max {
                     Some(b.min)
@@ -1271,8 +1246,6 @@ mod tests {
                         min: Some(1.into()),
                         max: Some(16.into()),
                         sum: 31.into(),
-                        start_time: SystemTime::now(),
-                        time: SystemTime::now(),
                         scale: -1,
                         positive_bucket: data::ExponentialBucket {
                             offset: -1,
@@ -1319,8 +1292,6 @@ mod tests {
                             offset: -1,
                             counts: vec![1, 4, 1],
                         },
-                        start_time: SystemTime::now(),
-                        time: SystemTime::now(),
                        negative_bucket: data::ExponentialBucket {
                             offset: 0,
                             counts: vec![],
@@ -1365,8 +1336,6 @@ mod tests {
                             offset: -1,
                             counts: vec![1, 4, 1],
                         },
-                        start_time: SystemTime::now(),
-                        time: SystemTime::now(),
                         negative_bucket: data::ExponentialBucket {
                             offset: 0,
                             counts: vec![],
@@ -1411,8 +1380,6 @@ mod tests {
                             counts: vec![1, 6, 2],
                         },
                         attributes: vec![],
-                        start_time: SystemTime::now(),
-                        time: SystemTime::now(),
                         negative_bucket: data::ExponentialBucket {
                             offset: 0,
                             counts: vec![],
@@ -1441,7 +1408,7 @@ mod tests {
                 count = out_fn.call(Some(got.as_mut())).0
             }
 
-            assert_aggregation_eq::<T>(Box::new(test.want), got, true, test.name);
+            assert_aggregation_eq::<T>(Box::new(test.want), got, test.name);
             assert_eq!(test.want_count, count, "{}", test.name);
         }
     }
@@ -1449,7 +1416,6 @@ mod tests {
     fn assert_aggregation_eq<T: Number<T> + PartialEq>(
         a: Box<dyn Aggregation>,
        b: Box<dyn Aggregation>,
-        ignore_timestamp: bool,
         test_name: &'static str,
     ) {
         assert_eq!(
@@ -1471,7 +1437,6 @@ mod tests {
                 assert_data_points_eq(
                     a,
                     b,
-                    ignore_timestamp,
                     "mismatching gauge data points",
                     test_name,
                 );
@@ -1498,7 +1463,6 @@ mod tests {
                 assert_data_points_eq(
                     a,
                     b,
-                    ignore_timestamp,
                     "mismatching sum data points",
                     test_name,
                 );
@@ -1520,7 +1484,6 @@ mod tests {
                 assert_hist_data_points_eq(
                     a,
                     b,
-                    ignore_timestamp,
                     "mismatching hist data points",
                     test_name,
                 );
@@ -1545,7 +1508,6 @@ mod tests {
                 assert_exponential_hist_data_points_eq(
                     a,
                     b,
-                    ignore_timestamp,
                     "mismatching hist data points",
                     test_name,
                 );
@@ -1558,7 +1520,6 @@ mod tests {
     fn assert_data_points_eq<T: Number<T>>(
         a: &data::DataPoint<T>,
         b: &data::DataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1568,21 +1529,11 @@ mod tests {
             test_name, message
         );
         assert_eq!(a.value, b.value, "{}: {} value", test_name, message);
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }
 
     fn assert_hist_data_points_eq<T: Number<T>>(
         a: &data::HistogramDataPoint<T>,
         b: &data::HistogramDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1601,21 +1552,11 @@ mod tests {
         assert_eq!(a.min, b.min, "{}: {} min", test_name, message);
         assert_eq!(a.max, b.max, "{}: {} max", test_name, message);
         assert_eq!(a.sum, b.sum, "{}: {} sum", test_name, message);
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }
 
     fn assert_exponential_hist_data_points_eq<T: Number<T>>(
         a: &data::ExponentialHistogramDataPoint<T>,
         b: &data::ExponentialHistogramDataPoint<T>,
-        ignore_timestamp: bool,
         message: &'static str,
         test_name: &'static str,
     ) {
@@ -1646,14 +1587,5 @@ mod tests {
             "{}: {} neg",
             test_name, message
         );
-
-        if !ignore_timestamp {
-            assert_eq!(
-                a.start_time, b.start_time,
-                "{}: {} start time",
-                test_name, message
-            );
-            assert_eq!(a.time, b.time, "{}: {} time", test_name, message);
-        }
     }
 }
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/histogram.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/histogram.rs
index b5c020ce8e..24db5dcc4b 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/histogram.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/histogram.rs
@@ -1,5 +1,5 @@
 use hashbrown::HashMap;
-use std::{sync::Mutex, time::SystemTime};
+use std::sync::Mutex;
 
 use crate::metrics::data::{self, Aggregation, Temporality};
 use crate::{metrics::data::HistogramDataPoint, metrics::AttributeSet};
@@ -117,7 +117,6 @@ impl<T: Number<T>> HistValues<T> {
 pub(crate) struct Histogram<T> {
     hist_values: HistValues<T>,
     record_min_max: bool,
-    start: Mutex<SystemTime>,
 }
 
 impl<T: Number<T>> Histogram<T> {
@@ -125,7 +124,6 @@ impl<T: Number<T>> Histogram<T> {
         Histogram {
             hist_values: HistValues::new(boundaries, record_sum),
             record_min_max,
-            start: Mutex::new(SystemTime::now()),
         }
     }
 
@@ -141,12 +139,7 @@ impl<T: Number<T>> Histogram<T> {
             Ok(guard) if !guard.is_empty() => guard,
             _ => return (0, None),
         };
-        let t = SystemTime::now();
-        let start = self
-            .start
-            .lock()
-            .map(|s| *s)
-            .unwrap_or_else(|_| SystemTime::now());
+
         let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::Histogram<T>>());
         let mut new_agg = if h.is_none() {
             Some(data::Histogram {
@@ -171,8 +164,6 @@ impl<T: Number<T>> Histogram<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: start,
-                time: t,
                 count: b.count,
                 bounds: self.hist_values.bounds.clone(),
                 bucket_counts: b.counts.clone(),
@@ -194,12 +185,6 @@ impl<T: Number<T>> Histogram<T> {
                 exemplars: vec![],
             });
         }
-
-        // The delta collection cycle resets.
-        if let Ok(mut start) = self.start.lock() {
-            *start = t;
-        }
-
         (n, new_agg.map(|a| Box::new(a) as Box<_>))
     }
 
@@ -211,12 +196,7 @@ impl<T: Number<T>> Histogram<T> {
             Ok(guard) if !guard.is_empty() => guard,
             _ => return (0, None),
         };
-        let t = SystemTime::now();
-        let start = self
-            .start
-            .lock()
-            .map(|s| *s)
-            .unwrap_or_else(|_| SystemTime::now());
+
         let h = dest.and_then(|d| d.as_mut().downcast_mut::<data::Histogram<T>>());
         let mut new_agg = if h.is_none() {
             Some(data::Histogram {
@@ -245,8 +225,6 @@ impl<T: Number<T>> Histogram<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: start,
-                time: t,
                 count: b.count,
                 bounds: self.hist_values.bounds.clone(),
                 bucket_counts: b.counts.clone(),
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/last_value.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/last_value.rs
index 6cf003aec6..04029c532f 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/last_value.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/last_value.rs
@@ -1,7 +1,6 @@
 use std::{
     collections::{hash_map::Entry, HashMap},
     sync::Mutex,
-    time::SystemTime,
 };
 
 use crate::{metrics::data::DataPoint, metrics::AttributeSet};
@@ -14,7 +13,6 @@ use super::{
 
 /// Timestamped measurement data.
 struct DataPointValue<T> {
-    timestamp: SystemTime,
     value: T,
 }
 
@@ -31,7 +29,6 @@ impl<T: Number<T>> LastValue<T> {
 
     pub(crate) fn measure(&self, measurement: T, attrs: AttributeSet) {
         let d: DataPointValue<T> = DataPointValue {
-            timestamp: SystemTime::now(),
             value: measurement,
         };
         if let Ok(mut values) = self.values.lock() {
@@ -70,9 +67,7 @@ impl<T: Number<T>> LastValue<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                time: Some(value.timestamp),
                 value: value.value,
-                start_time: None,
                 exemplars: vec![],
             });
         }
diff --git a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/sum.rs b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/sum.rs
index 141b2659bb..2f2b660886 100644
--- a/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/sum.rs
+++ b/third_party/opentelemetry/opentelemetry-rust/opentelemetry-sdk/src/metrics/internal/sum.rs
@@ -1,9 +1,10 @@
-use std::sync::atomic::{AtomicBool, Ordering};
-use std::vec;
+extern crate alloc;
+
+use core::sync::atomic::{AtomicBool, Ordering};
+use alloc::vec;
 use std::{
     collections::HashMap,
     sync::{Mutex, RwLock},
-    time::SystemTime,
 };
 
 use crate::metrics::data::{self, Aggregation, DataPoint, Temporality};
@@ -82,7 +83,6 @@ impl<T: Number<T>> ValueMap<T> {
 pub(crate) struct Sum<T: Number<T>> {
     value_map: ValueMap<T>,
     monotonic: bool,
-    start: Mutex<SystemTime>,
 }
 
 impl<T: Number<T>> Sum<T> {
@@ -95,7 +95,6 @@ impl<T: Number<T>> Sum<T> {
         Sum {
             value_map: ValueMap::new(),
             monotonic,
-            start: Mutex::new(SystemTime::now()),
         }
     }
 
@@ -107,7 +106,6 @@ impl<T: Number<T>> Sum<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
 
         let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
         let mut new_agg = if s_data.is_none() {
@@ -136,7 +134,6 @@ impl<T: Number<T>> Sum<T> {
                 .reserve_exact(n - s_data.data_points.capacity());
         }
 
-        let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);
         if self
             .value_map
             .has_no_value_attribute_value
@@ -144,8 +141,6 @@ impl<T: Number<T>> Sum<T> {
         {
             s_data.data_points.push(DataPoint {
                 attributes: vec![],
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: self.value_map.no_attribute_value.get_and_reset_value(),
                 exemplars: vec![],
             });
@@ -157,18 +152,11 @@ impl<T: Number<T>> Sum<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: value.get_value(),
                 exemplars: vec![],
             });
         }
 
-        // The delta collection cycle resets.
-        if let Ok(mut start) = self.start.lock() {
-            *start = t;
-        }
-
         (
             s_data.data_points.len(),
             new_agg.map(|a| Box::new(a) as Box<_>),
@@ -179,7 +167,6 @@ impl<T: Number<T>> Sum<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
 
         let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
         let mut new_agg = if s_data.is_none() {
@@ -208,8 +195,6 @@ impl<T: Number<T>> Sum<T> {
                 .reserve_exact(n - s_data.data_points.capacity());
         }
 
-        let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);
-
         if self
             .value_map
             .has_no_value_attribute_value
@@ -217,8 +202,6 @@ impl<T: Number<T>> Sum<T> {
         {
             s_data.data_points.push(DataPoint {
                 attributes: vec![],
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: self.value_map.no_attribute_value.get_value(),
                 exemplars: vec![],
             });
@@ -234,8 +217,6 @@ impl<T: Number<T>> Sum<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: value.get_value(),
                 exemplars: vec![],
             });
@@ -252,7 +233,6 @@ impl<T: Number<T>> Sum<T> {
 pub(crate) struct PrecomputedSum<T: Number<T>> {
     value_map: ValueMap<T>,
     monotonic: bool,
-    start: Mutex<SystemTime>,
     reported: Mutex<HashMap<AttributeSet, T>>,
 }
 
@@ -261,7 +241,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
         PrecomputedSum {
             value_map: ValueMap::new(),
             monotonic,
-            start: Mutex::new(SystemTime::now()),
             reported: Mutex::new(Default::default()),
         }
     }
@@ -274,8 +253,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
-        let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);
 
         let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
         let mut new_agg = if s_data.is_none() {
@@ -316,8 +293,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
         {
             s_data.data_points.push(DataPoint {
                 attributes: vec![],
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: self.value_map.no_attribute_value.get_and_reset_value(),
                 exemplars: vec![],
             });
@@ -334,18 +309,11 @@ impl<T: Number<T>> PrecomputedSum<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: delta,
                 exemplars: vec![],
             });
         }
 
-        // The delta collection cycle resets.
-        if let Ok(mut start) = self.start.lock() {
-            *start = t;
-        }
-
         *reported = new_reported;
         drop(reported); // drop before values guard is dropped
 
@@ -359,8 +327,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
         &self,
         dest: Option<&mut dyn Aggregation>,
     ) -> (usize, Option<Box<dyn Aggregation>>) {
-        let t = SystemTime::now();
-        let prev_start = self.start.lock().map(|start| *start).unwrap_or(t);
 
         let s_data = dest.and_then(|d| d.as_mut().downcast_mut::<data::Sum<T>>());
         let mut new_agg = if s_data.is_none() {
@@ -401,8 +367,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
         {
             s_data.data_points.push(DataPoint {
                 attributes: vec![],
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: self.value_map.no_attribute_value.get_value(),
                 exemplars: vec![],
             });
@@ -419,8 +383,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
                     .iter()
                     .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
                     .collect(),
-                start_time: Some(prev_start),
-                time: Some(t),
                 value: delta,
                 exemplars: vec![],
             });