Skip to content

Commit

Permalink
ref(profiles): Ensure UUIDs for chunk and profiler and sort samples (#3588)
Browse files Browse the repository at this point in the history
  • Loading branch information
phacops committed May 14, 2024
1 parent 087ac91 commit 1ddc9a2
Show file tree
Hide file tree
Showing 4 changed files with 48 additions and 5 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
- Emit negative outcomes when metrics are rejected because of a disabled namespace. ([#3544](https://github.com/getsentry/relay/pull/3544))
- Add AI model costs to global config. ([#3579](https://github.com/getsentry/relay/pull/3579))
- Add support for `event.` in the `Span` `Getter` implementation. ([#3577](https://github.com/getsentry/relay/pull/3577))
- Ensure `chunk_id` and `profiler_id` are UUIDs and sort samples. ([#3588](https://github.com/getsentry/relay/pull/3588))

## 24.4.2

Expand Down
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions relay-profiling/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ itertools = { workspace = true }
relay-base-schema = { workspace = true }
relay-event-schema = { workspace = true }
relay-log = { workspace = true }
relay-metrics = { workspace = true }
relay-protocol = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
Expand Down
50 changes: 45 additions & 5 deletions relay-profiling/src/sample/v2.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,19 @@ use std::collections::{BTreeMap, HashSet};

use serde::{Deserialize, Serialize};

use relay_event_schema::protocol::EventId;
use relay_metrics::FiniteF64;

use crate::error::ProfileError;
use crate::measurements::Measurement;
use crate::sample::{DebugMeta, Frame, ThreadMetadata, Version};

#[derive(Debug, Serialize, Deserialize)]
pub struct ProfileMetadata {
/// Random UUID identifying a chunk
pub chunk_id: String,
pub chunk_id: EventId,
/// Random UUID for each profiler session
pub profiler_id: String,
pub profiler_id: EventId,

#[serde(default, skip_serializing_if = "DebugMeta::is_empty")]
pub debug_meta: DebugMeta,
Expand All @@ -40,7 +43,7 @@ pub struct ProfileMetadata {
pub struct Sample {
/// Unix timestamp in seconds with millisecond precision when the sample
/// was captured.
pub timestamp: f64,
pub timestamp: FiniteF64,
/// Index of the stack in the `stacks` field of the profile.
pub stack_id: usize,
/// Thread or queue identifier
Expand All @@ -65,7 +68,7 @@ impl ProfileChunk {
}
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct ProfileData {
/// `samples` contains the list of samples referencing a stack and thread identifier.
/// If 2 stack of frames captured at 2 different timestamps are identical, you're expected to
Expand Down Expand Up @@ -95,6 +98,8 @@ impl ProfileData {
return Err(ProfileError::NotEnoughSamples);
}

self.samples.sort_by_key(|s| s.timestamp);

if !self.all_stacks_referenced_by_samples_exist() {
return Err(ProfileError::MalformedSamples);
}
Expand Down Expand Up @@ -154,7 +159,9 @@ pub fn parse(payload: &[u8]) -> Result<ProfileChunk, ProfileError> {

#[cfg(test)]
mod tests {
use crate::sample::v2::parse;
use relay_metrics::FiniteF64;

use crate::sample::v2::{parse, ProfileData, Sample};

#[test]
fn test_roundtrip() {
Expand All @@ -165,4 +172,37 @@ mod tests {
let second_parse = parse(&second_payload[..]);
assert!(second_parse.is_ok(), "{:#?}", second_parse);
}

#[test]
fn test_samples_are_sorted() {
    // Builds a sample on stack 0 / thread "1" captured at the given timestamp.
    let sample_at = |seconds: f64| Sample {
        stack_id: 0,
        thread_id: "1".to_string(),
        timestamp: FiniteF64::new(seconds).unwrap(),
    };

    // Samples are deliberately inserted out of chronological order so that
    // normalization has something to sort.
    let mut chunk = ProfileData {
        samples: vec![sample_at(2000.0), sample_at(1000.0)],
        stacks: vec![vec![0]],
        frames: vec![Default::default()],
        ..Default::default()
    };

    // Normalization must succeed and, as a side effect, order the samples
    // by timestamp.
    assert!(chunk.normalize("python").is_ok());

    let observed: Vec<FiniteF64> = chunk.samples.iter().map(|s| s.timestamp).collect();

    assert_eq!(
        observed,
        vec![
            FiniteF64::new(1000.0).unwrap(),
            FiniteF64::new(2000.0).unwrap(),
        ]
    );
}
}

0 comments on commit 1ddc9a2

Please sign in to comment.