Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion src/data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ pub mod aperf_runlog;
pub mod aperf_stats;
pub mod constants;
pub mod cpu_utilization;
mod data_formats;
pub mod data_formats;
pub mod diskstats;
pub mod flamegraphs;
pub mod hotline;
Expand All @@ -19,6 +19,7 @@ pub mod systeminfo;
pub mod utils;
pub mod vmstat;

use crate::data::data_formats::AperfData;
use crate::utils::{get_data_name_from_type, DataMetrics};
use crate::visualizer::{DataVisualizer, GetData, ReportParams};
use crate::{noop, InitParams, PerformanceData, VisualizationData, APERF_FILE_FORMAT};
Expand Down Expand Up @@ -354,6 +355,14 @@ macro_rules! processed_data {
}
}

/// Dispatches to the wrapped processed-data type's
/// `process_raw_data_new`, producing the unified `AperfData`
/// representation for the frontend.
pub fn process_raw_data_new(&mut self, raw_data: Vec<Data>) -> Result<AperfData> {
    match self {
        $(
            // The inner call already returns `Result<AperfData>`;
            // re-wrapping with `Ok(…?)` was redundant.
            ProcessedData::$processed_data(ref mut value) => value.process_raw_data_new(raw_data),
        )*
    }
}

pub fn custom_raw_data_parser(&mut self, parser_params: ReportParams) -> Result<Vec<ProcessedData>> {
match self {
$(
Expand Down
45 changes: 45 additions & 0 deletions src/data/cpu_utilization.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use crate::data::data_formats::{AperfData, Series, TimeSeriesData, TimeSeriesMetric};
use crate::data::{CollectData, CollectorParams, Data, ProcessedData, TimeEnum};
use crate::utils::{get_data_name_from_type, DataMetrics, Metric};
use crate::visualizer::GetData;
Expand Down Expand Up @@ -351,6 +352,50 @@ impl GetData for CpuUtilization {
process_gathered_raw_data(buffer)
}

/// Builds the new unified `AperfData` representation for CPU
/// utilization. Work in progress: the per-sample values are not
/// parsed into the series yet — see the TODO below.
///
/// One `TimeSeriesMetric` is created per CPU-time category, and the
/// "aggregate" metric is pre-populated with one named (still empty)
/// series per category.
fn process_raw_data_new(&mut self, raw_data: Vec<Data>) -> Result<AperfData> {
    let mut time_series = TimeSeriesData::default();
    // CPU-time categories, plus the combined "aggregate" entry.
    let cpu_util_metrics = [
        "aggregate",
        "user",
        "nice",
        "system",
        "irq",
        "softirq",
        "idle",
        "iowait",
        "steal",
    ];
    // One (empty) metric entry per category.
    for metric in cpu_util_metrics {
        time_series
            .metrics
            .insert(metric.to_string(), TimeSeriesMetric::default());
    }

    // Pre-populate "aggregate" with one named series per category.
    // Hoisted: look the metric up once instead of once per iteration.
    // NOTE(review): every named series lands under "aggregate" —
    // presumably the combined chart plots all categories together;
    // confirm once value parsing is implemented.
    let aggregate = time_series
        .metrics
        .get_mut("aggregate")
        .expect("\"aggregate\" was inserted above");
    for metric in cpu_util_metrics {
        aggregate.series.push(Series::new(Some(metric.to_string())));
    }

    for buffer in raw_data {
        let raw_value = match buffer {
            Data::CpuUtilizationRaw(ref value) => value,
            _ => panic!("Invalid Data type in raw file"),
        };
        // Parsed /proc/stat snapshot and its timestamp; currently only
        // validated, not yet folded into `time_series`.
        let _stat = KernelStats::from_reader(raw_value.data.as_bytes()).unwrap();
        let _time_now = match raw_value.time {
            TimeEnum::DateTime(value) => value,
            _ => panic!("Has to be datetime"),
        };
        //TODO: parse raw values and add to time_series
    }
    Ok(AperfData::TimeSeries(time_series))
}

fn get_calls(&mut self) -> Result<Vec<String>> {
Ok(vec!["keys".to_string(), "values".to_string()])
}
Expand Down
119 changes: 113 additions & 6 deletions src/data/data_formats.rs
Original file line number Diff line number Diff line change
@@ -1,26 +1,133 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/**
* This module defines generalized data types of all Aperf processed data used by the
* frontend JavaScripts.
*/

/// Top-level container for one data source's processed output, in one
/// of the generalized formats consumed by the frontend.
/// `untagged`: the serialized JSON is just the inner payload with no
/// variant tag; the frontend learns the format separately (via the
/// format-name string emitted alongside the data).
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
pub enum AperfData {
    TimeSeries(TimeSeriesData),
    Text(TextData),
    KeyValue(KeyValueData),
    Graph(GraphData),
}

impl AperfData {
    /// Short format identifier for this payload, used by the frontend
    /// to pick the matching renderer.
    pub fn get_format_name(&self) -> String {
        let name = match self {
            AperfData::TimeSeries(_) => "time_series",
            AperfData::Text(_) => "text",
            AperfData::KeyValue(_) => "key_value",
            AperfData::Graph(_) => "graph",
        };
        name.to_string()
    }
}

/// Time-series payload: a set of named metrics plotted over time.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct TimeSeriesData {
    // Keyed by metric name (e.g. a CPU-time category).
    pub metrics: HashMap<String, TimeSeriesMetric>,
    // Display order for the keys of `metrics` — presumably filled by
    // the producer so the frontend renders deterministically; TODO
    // confirm against the consumers.
    pub sorted_keys: Vec<String>,
}

/// One metric's plotted series plus free-form metadata and summary
/// statistics.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct TimeSeriesMetric {
    // One entry per plotted line within this metric.
    pub series: Vec<Series>,
    // Free-form string metadata describing the metric.
    pub metadata: HashMap<String, String>,
    // Aggregate statistics over the metric's values.
    pub stats: Statistics,
}

/// A single plotted line: values sampled over time.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Series {
    // Optional display name; omitted from the JSON when `None`
    // (single-series metrics need no name).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub series_name: Option<String>,
    // Time offset of each sample — presumably seconds elapsed since
    // the start of the run; TODO confirm units against the producer.
    pub time_diff: Vec<u64>,
    // Sample values, index-aligned with `time_diff`.
    pub values: Vec<u64>,
}

impl Series {
pub fn new(series_name: Option<String>) -> Self {
Series {
series_name,
time_diff: Vec::new(),
values: Vec::new(),
}
}
}

/// Summary statistics for one time-series metric.
/// All fields are 0.0 under the derived `Default`.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct Statistics {
    pub avg: f64,   // arithmetic mean
    pub std: f64,   // standard deviation
    pub min: f64,
    pub max: f64,
    pub p50: f64,   // median
    pub p90: f64,
    pub p99: f64,
    pub p99_9: f64, // 99.9th percentile
}

impl Statistics {
pub fn new() -> Self {
Statistics {
avg: 0.0,
std: 0.0,
min: 0.0,
max: 0.0,
p50: 0.0,
p90: 0.0,
p99: 0.0,
p99_9: 0.0,
}
}
}

/// Plain-text payload: raw lines the frontend shows verbatim.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct TextData {
    pub lines: Vec<String>,
}

/// Key/value payload: named groups of key/value pairs.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct KeyValueData {
    // Keyed by group name.
    pub key_value_groups: HashMap<String, KeyValueGroup>,
}

/// One named group of key/value pairs.
// `Default` derived for consistency with the other payload structs in
// this module (`TimeSeriesData`, `TextData`, `KeyValueData`,
// `GraphData` all derive it); backward compatible.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct KeyValueGroup {
    pub key_values: HashMap<String, String>,
}

impl KeyValueGroup {
pub fn new() -> Self {
KeyValueGroup {
key_values: HashMap::new(),
}
}
}

/// Graph payload: named groups of pre-rendered graph files (e.g.
/// flamegraphs) referenced by path.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct GraphData {
    // Keyed by group name.
    pub graph_groups: HashMap<String, GraphGroup>,
}

#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct HtmlData {
pub data_type: String,
pub graphs: Vec<HtmlDataGraph>,
pub struct GraphGroup {
pub group_name: String,
pub graphs: HashMap<String, Graph>,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct HtmlDataGraph {
pub struct Graph {
pub graph_name: String,
pub graph_path: String,
pub graph_size: Option<u64>,
}

impl HtmlDataGraph {
impl Graph {
pub fn new(graph_name: String, graph_path: String, graph_size: Option<u64>) -> Self {
HtmlDataGraph {
Graph {
graph_name,
graph_path,
graph_size,
Expand Down
29 changes: 16 additions & 13 deletions src/data/java_profile.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::data::data_formats::{HtmlData, HtmlDataGraph};
use crate::data::data_formats::{Graph, GraphGroup};
use crate::data::{CollectData, CollectorParams, ProcessedData};
use crate::utils::{get_data_name_from_type, DataMetrics};
use crate::visualizer::GetData;
Expand Down Expand Up @@ -301,7 +301,7 @@ impl CollectData for JavaProfileRaw {

#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct JavaProfile {
pub data: Vec<HtmlData>,
pub data: Vec<GraphGroup>,
}

impl JavaProfile {
Expand All @@ -328,8 +328,8 @@ impl GetData for JavaProfile {
let mut profile_metrics = Vec::from(PROFILE_METRICS);
profile_metrics.push("legacy");
for metric in profile_metrics {
let mut java_profile_data = HtmlData::default();
java_profile_data.data_type = String::from(metric);
let mut java_profile_data = GraphGroup::default();
java_profile_data.group_name = String::from(metric);

for (process, process_names) in &process_map {
let filename = if metric == "legacy" {
Expand All @@ -349,16 +349,19 @@ impl GetData for JavaProfile {
&params.data_dir,
&params.report_dir.join(relative_path),
) {
java_profile_data.graphs.push(HtmlDataGraph::new(
format!(
"JVM: {}, PID: {} ({})",
process_names.first().map_or("unknown", |s| s.as_str()),
process,
metric
java_profile_data.graphs.insert(
process.clone(),
Graph::new(
format!(
"JVM: {}, PID: {} ({})",
process_names.first().map_or("unknown", |s| s.as_str()),
process,
metric
),
format!("{}/{}", relative_path, filename),
Some(file_size),
),
format!("{}/{}", relative_path, filename),
Some(file_size),
));
);
}
}

Expand Down
13 changes: 9 additions & 4 deletions src/html_files/flamegraphs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,20 @@ function getJavaFlamegraphInfo(run, container_id, run_data, metric){
if (handleNoData(container_id, run_data)) return;

let values = JSON.parse(run_data['values']);
let data = values.find((d) => d['data_type'] == metric);
let data = values.find((d) => d['group_name'] == metric);

let sorted = data['graphs'].filter((graph) => !graph["graph_name"].includes('-')).toSorted((x, y) => y["graph_size"] - x["graph_size"]);

if(sorted.length == 0){
if (!data || !data['graphs'] || Object.keys(data['graphs']).length === 0) {
var h3 = document.createElement('h3');
h3.innerText = `No data collected.`;
addElemToNode(container_id, h3);
return;
}

let graphs = [];
for (let key in data['graphs']) {
graphs.push(data['graphs'][key]);
}
let sorted = graphs.filter((graph) => !graph["graph_name"].includes('-')).sort((x, y) => y["graph_size"] - x["graph_size"]);

for(let graph of sorted){
var h3 = document.createElement('h3');
Expand Down
11 changes: 11 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -469,6 +469,17 @@ impl VisualizationData {
Ok(())
}

/// Runs the new-format raw-data processing for every registered
/// visualizer on the run named `name`. Failures are logged rather
/// than propagated while the new pipeline is being rolled out.
pub fn unpack_data_new(&mut self, name: String) -> Result<()> {
    for (dvname, datavisualizer) in self.visualizers.iter_mut() {
        debug!("Unpacking data for: {}", dvname);
        // TODO: propagate errors once all visualizers implement the
        // new path. Log the error itself instead of discarding it so
        // a real failure is distinguishable from "not implemented".
        if let Err(e) = datavisualizer.process_raw_data_new(name.clone()) {
            debug!("process_raw_data_new failed for {}: {}", dvname, e);
        }
    }
    Ok(())
}

pub fn get_api(&mut self, name: String) -> Result<String> {
let api = self.visualizers.get(&name).unwrap().api_name.clone();
Ok(api)
Expand Down
46 changes: 45 additions & 1 deletion src/report.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use crate::data::data_formats::AperfData;
use crate::data::JS_DIR;
use crate::{data, PDError, VisualizationData};
use anyhow::Result;
Expand All @@ -22,6 +23,23 @@ pub struct Report {
pub name: Option<String>,
}

/// One data source's entry in the generated `processed_data.js`:
/// that source's processed data for every run in the report.
#[derive(Serialize, Deserialize, Debug, Clone)]
struct ReportData {
    // Visualizer/data-source name.
    data_name: String,
    // Format tag of the runs' data ("time_series", "text", ...).
    // Overwritten on each run inserted — presumably every run of one
    // source shares a single format; TODO confirm.
    data_format: String,
    // Keyed by run name.
    runs: HashMap<String, AperfData>,
}

impl ReportData {
fn new(data_name: String) -> Self {
ReportData {
data_name,
data_format: String::new(),
runs: HashMap::new(),
}
}
}

#[derive(Serialize, Deserialize, Debug, Clone)]
struct Api {
name: String,
Expand Down Expand Up @@ -245,7 +263,8 @@ pub fn report(report: &Report, tmp_dir: &PathBuf) -> Result<()> {
/* Init visualizers */
for dir in dir_paths {
let name = visualization_data.init_visualizers(dir.to_owned(), tmp_dir, &report_name)?;
visualization_data.unpack_data(name)?;
visualization_data.unpack_data(name.clone())?;
visualization_data.unpack_data_new(name)?;
}

/* Generate visualizer JS files */
Expand Down Expand Up @@ -312,6 +331,31 @@ pub fn report(report: &Report, tmp_dir: &PathBuf) -> Result<()> {
write!(out_file, "{}", str_out_data)?;
}

/* Get visualizer data unified */
let visualizer_names = visualization_data.get_visualizer_names()?; // TODO: remove after replacing old get visualizer data
let out_loc = report_name.join("data/js/processed_data.js");
let mut out_file = File::create(out_loc)?;
writeln!(out_file, "processed_data = {{")?;
for name in visualizer_names {
let mut report_data = ReportData::new(name.clone());
for run_name in &run_names {
let visualizer = visualization_data
.visualizers
.get_mut(&name)
.ok_or(PDError::VisualizerHashMapEntryError(name.to_string()))?;
let data = match visualizer.run_values_new.get(run_name) {
Some(data) => data,
None => continue,
};
report_data.runs.insert(run_name.clone(), data.clone());
report_data.data_format = data.get_format_name();
}
let out_data = serde_json::to_string(&report_data)?;
write!(out_file, r#""{}": "#, name.clone())?;
writeln!(out_file, "{},", out_data)?;
}
write!(out_file, "}}")?;

let out_analytics = report_name.join("data/js/analytics.js");
let mut out_file = File::create(out_analytics)?;
let stats = visualization_data.get_analytics()?;
Expand Down
Loading
Loading