retrieve cluster and backend metrics by ids
For each cluster, we display the metrics that belong to it
and the metrics of the attached backends.
This commit redefines WorkerMetrics served in ProxyResponseData,
with plenty of Options in it and its child structs, to enable
partial answers: only proxy metrics, only certain clusters or
certain backends, or everything at once.
Keksoj committed Aug 22, 2022
1 parent 67445c4 commit ae8d9c1
Showing 7 changed files with 221 additions and 234 deletions.
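Concretely, the new Option fields are what make partial answers possible: a worker only fills in what was requested. A minimal sketch of a proxy-only answer, built from the redefined structs in command/src/proxy.rs further down (the import path and the metric value are assumptions, mirroring the test data in command/src/command.rs):

use std::collections::BTreeMap;
// Assumed import path; inside the command crate this is `crate::proxy::{...}`.
use sozu_command_lib::proxy::{FilteredData, WorkerMetrics};

/// A proxy-only partial answer: only the `proxy` field is filled.
fn proxy_only_answer() -> WorkerMetrics {
    let mut proxy = BTreeMap::new();
    // hypothetical metric value
    proxy.insert("sozu.gauge".to_string(), FilteredData::Gauge(1));
    WorkerMetrics {
        proxy: Some(proxy),
        clusters: None, // no cluster or backend metrics were requested
    }
}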
9 changes: 5 additions & 4 deletions bin/src/cli.rs
@@ -758,10 +758,11 @@ pub enum QueryCmd {
#[clap(
short = 'b',
long="backends",
help="list of backends, in the form 'cluster_id/backend_id, other_cluster/other_backend'",
use_delimiter = true,
parse(try_from_str = split_slash))]
backends: Vec<(String, String)>,
help="coma-separated list of backends, 'one_backend_id, other_backend_id'",
use_delimiter = true
// parse(try_from_str = split_slash)
)]
backends: Vec<String>,
},
}

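With `use_delimiter = true`, clap splits the comma-separated value into the `Vec<String>` directly. A standalone sketch of that behaviour (assuming clap 3.x with the derive feature; the struct and values are hypothetical, not sozu's actual QueryCmd):

use clap::Parser;

#[derive(Parser, Debug)]
struct BackendArgs {
    /// comma-separated list of backend ids
    #[clap(short = 'b', long = "backends", use_delimiter = true)]
    backends: Vec<String>,
}

fn main() {
    let args = BackendArgs::parse_from(["metrics", "--backends", "backend_1,backend_2"]);
    assert_eq!(args.backends, vec!["backend_1".to_string(), "backend_2".to_string()]);
}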
7 changes: 3 additions & 4 deletions bin/src/ctl/command.rs
@@ -779,10 +779,9 @@ impl CommandManager {
refresh: Option<u32>,
names: Vec<String>,
cluster_ids: Vec<String>,
backends: Vec<(String, String)>, // (cluster_id, backend_id)
// proxy: bool,
backend_ids: Vec<String>,
) -> Result<(), anyhow::Error> {
let query = match (list, cluster_ids.is_empty(), backends.is_empty()) {
let query = match (list, cluster_ids.is_empty(), backend_ids.is_empty()) {
(true, _, _) => QueryMetricsType::List,
(false, true, true) => QueryMetricsType::All,
(false, false, _) => QueryMetricsType::Cluster {
@@ -792,7 +791,7 @@
},
(false, true, false) => QueryMetricsType::Backend {
metrics: names,
backends,
backend_ids,
date: None,
},
};
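For reference, when only backend ids are given (list is false and cluster_ids is empty), the match above resolves to the Backend variant. A hypothetical example of the value it builds (import path assumed):

use sozu_command_lib::proxy::QueryMetricsType; // assumed import path

fn backend_only_query() -> QueryMetricsType {
    // assumed inputs: list = false, names = [], cluster_ids = [], backend_ids = ["backend_1"]
    QueryMetricsType::Backend {
        metrics: vec![],
        backend_ids: vec!["backend_1".to_string()],
        date: None,
    }
}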
109 changes: 36 additions & 73 deletions bin/src/ctl/display.rs
@@ -87,106 +87,69 @@ pub fn print_metrics(

if list {
return print_available_metrics(&answers);
}

for (worker_id, query_answer) in answers.iter() {
println!("\nWorker {}\n=========", worker_id);
print_worker_metrics(query_answer)?;
} else {
for (worker_id, query_answer) in answers.iter() {
println!("\nWorker {}\n=========", worker_id);
print_worker_metrics(query_answer)?;
}
}
Ok(())
}

fn print_worker_metrics(query_answer: &QueryAnswer) -> anyhow::Result<()> {
let filtered_metrics = match query_answer {
QueryAnswer::Metrics(QueryAnswerMetrics::Cluster(m)) => filter_cluster_metrics(m),
QueryAnswer::Metrics(QueryAnswerMetrics::Backend(m)) => filter_backend_metrics(m),
match query_answer {
QueryAnswer::Metrics(QueryAnswerMetrics::All(WorkerMetrics { proxy, clusters })) => {
filter_worker_metrics(proxy, clusters)
print_proxy_metrics(proxy);
print_cluster_metrics(clusters);
}
// TODO: handle and print an error of the form
// QueryAnswer::Metrics(QueryAnswerMetrics::Error(String))
_ => bail!("The query answer is wrong."),
};
}

print_gauges_and_counts(&filtered_metrics);
print_percentiles(&filtered_metrics);
Ok(())
}

fn filter_cluster_metrics(
// cluster_id -> (key, value)
cluster_metrics: &BTreeMap<String, BTreeMap<String, FilteredData>>,
) -> BTreeMap<String, FilteredData> {
let mut filtered_metrics = BTreeMap::new();
for (cluster_id, filtered_data) in cluster_metrics.iter() {
for (metric_key, filtered_value) in filtered_data.iter() {
filtered_metrics.insert(
format!("{} {}", cluster_id, metric_key.replace("\t", ".")),
filtered_value.clone(),
);
}
fn print_proxy_metrics(proxy_metrics: &Option<BTreeMap<String, FilteredData>>) {
if let Some(metrics) = proxy_metrics {
let filtered = filter_metrics(metrics);
print_gauges_and_counts(&filtered);
print_percentiles(&filtered);
}
filtered_metrics
}

fn filter_backend_metrics(
// cluster_id -> (backend_id -> (key -> metric))
backend_metrics: &BTreeMap<String, BTreeMap<String, BTreeMap<String, FilteredData>>>,
) -> BTreeMap<String, FilteredData> {
let mut filtered_metrics = BTreeMap::new();
for (cluster_id, cluster_metrics) in backend_metrics.iter() {
for (backend_id, backend_metrics) in cluster_metrics.iter() {
for (metric_key, filtered_value) in backend_metrics.iter() {
filtered_metrics.insert(
format!(
"{}/{} {}",
cluster_id,
backend_id,
metric_key.replace("\t", ".")
),
filtered_value.clone(),
);
fn print_cluster_metrics(cluster_metrics: &Option<BTreeMap<String, ClusterMetricsData>>) {
if let Some(cluster_metrics) = cluster_metrics {
for (cluster_id, cluster_metrics_data) in cluster_metrics.iter() {
println!("\nCluster {}\n--------", cluster_id);

if let Some(cluster) = &cluster_metrics_data.cluster {
let filtered = filter_metrics(&cluster);
print_gauges_and_counts(&filtered);
print_percentiles(&filtered);
}

if let Some(backends) = &cluster_metrics_data.backends {
for (backend_id, backend_metrics) in backends.iter() {
println!("\n{}/{}\n--------", cluster_id, backend_id);
let filtered = filter_metrics(backend_metrics);
print_gauges_and_counts(&filtered);
print_percentiles(&filtered);
}
}
}
}
filtered_metrics
}

fn filter_worker_metrics(
// key -> value
proxy_metrics: &BTreeMap<String, FilteredData>,
// cluster_id -> cluster+backend metrics
cluster_metrics: &BTreeMap<String, ClusterMetricsData>,
) -> BTreeMap<String, FilteredData> {
fn filter_metrics(metrics: &BTreeMap<String, FilteredData>) -> BTreeMap<String, FilteredData> {
let mut filtered_metrics = BTreeMap::new();

for (metric_key, filtered_value) in proxy_metrics.iter() {
for (metric_key, filtered_value) in metrics.iter() {
filtered_metrics.insert(
format!("{}", metric_key.replace("\t", ".")),
filtered_value.clone(),
);
}
for (cluster_id, cluster_metric_data) in cluster_metrics.iter() {
// cluster metrics
for (metric_key, filtered_value) in cluster_metric_data.cluster.iter() {
filtered_metrics.insert(
format!("{} {}", cluster_id, metric_key.replace("\t", ".")),
filtered_value.clone(),
);
}
// backend metrics
for (backend_id, backend_metrics) in cluster_metric_data.backends.iter() {
for (metric_key, filtered_value) in backend_metrics.iter() {
filtered_metrics.insert(
format!(
"{}/{} {}",
cluster_id,
backend_id,
metric_key.replace("\t", ".")
),
filtered_value.clone(),
);
}
}
}
filtered_metrics
}

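The shared filter_metrics helper above only normalizes the keys (tab separators become dots) before printing. A small self-contained sketch of that normalization (the key and value are hypothetical, and the import path is an assumption):

use std::collections::BTreeMap;
use sozu_command_lib::proxy::FilteredData; // assumed import path

fn main() {
    let mut raw: BTreeMap<String, FilteredData> = BTreeMap::new();
    raw.insert("http\tactive_requests".to_string(), FilteredData::Gauge(3)); // hypothetical key
    // same transformation as filter_metrics: "\t" in keys is replaced by "."
    let printable: BTreeMap<String, FilteredData> = raw
        .iter()
        .map(|(key, value)| (key.replace('\t', "."), value.clone()))
        .collect();
    assert!(printable.contains_key("http.active_requests"));
}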
2 changes: 1 addition & 1 deletion command/assets/answer_metrics.json
@@ -38,7 +38,7 @@
},
"clusters": {
"cluster_1": {
"data": {
"cluster": {
"request_time": {
"type": "PERCENTILES",
"data": {
114 changes: 64 additions & 50 deletions command/src/command.rs
@@ -159,8 +159,8 @@ mod tests {
use crate::proxy::{
AddCertificate, Backend, CertificateAndKey, CertificateFingerprint, Cluster,
ClusterMetricsData, FilteredData, HttpFrontend, LoadBalancingAlgorithms,
LoadBalancingParams, WorkerMetrics, PathRule, Percentiles, ProxyRequestData, RemoveBackend,
RemoveCertificate, Route, RulePosition, TlsVersion,
LoadBalancingParams, PathRule, Percentiles, ProxyRequestData, RemoveBackend,
RemoveCertificate, Route, RulePosition, TlsVersion, WorkerMetrics,
};
use hex::FromHex;
use serde_json;
@@ -563,40 +563,23 @@ mod tests {
workers: [(
String::from("0"),
WorkerMetrics {
proxy: [
(String::from("sozu.gauge"), FilteredData::Gauge(1)),
(String::from("sozu.count"), FilteredData::Count(-2)),
(String::from("sozu.time"), FilteredData::Time(1234)),
]
.iter()
.cloned()
.collect(),
clusters: [(
String::from("cluster_1"),
ClusterMetricsData {
cluster: [(
String::from("request_time"),
FilteredData::Percentiles(Percentiles {
samples: 42,
p_50: 1,
p_90: 2,
p_99: 10,
p_99_9: 12,
p_99_99: 20,
p_99_999: 22,
p_100: 30,
})
)]
.iter()
.cloned()
.collect(),
backends: [(
String::from("cluster_1-0"),
[
(String::from("bytes_in"), FilteredData::Count(256)),
(String::from("bytes_out"), FilteredData::Count(128)),
(
String::from("percentiles"),
proxy: Some(
[
(String::from("sozu.gauge"), FilteredData::Gauge(1)),
(String::from("sozu.count"), FilteredData::Count(-2)),
(String::from("sozu.time"), FilteredData::Time(1234)),
]
.iter()
.cloned()
.collect()
),
clusters: Some(
[(
String::from("cluster_1"),
ClusterMetricsData {
cluster: Some(
[(
String::from("request_time"),
FilteredData::Percentiles(Percentiles {
samples: 42,
p_50: 1,
@@ -607,20 +590,51 @@
p_99_999: 22,
p_100: 30,
})
)
]
.iter()
.cloned()
.collect()
)]
.iter()
.cloned()
.collect(),
}
)]
.iter()
.cloned()
.collect()
)]
.iter()
.cloned()
.collect()
),
backends: Some(
[(
String::from("cluster_1-0"),
[
(
String::from("bytes_in"),
FilteredData::Count(256)
),
(
String::from("bytes_out"),
FilteredData::Count(128)
),
(
String::from("percentiles"),
FilteredData::Percentiles(Percentiles {
samples: 42,
p_50: 1,
p_90: 2,
p_99: 10,
p_99_9: 12,
p_99_99: 20,
p_99_999: 22,
p_100: 30,
})
)
]
.iter()
.cloned()
.collect()
)]
.iter()
.cloned()
.collect()
),
}
)]
.iter()
.cloned()
.collect()
)
}
)]
.iter()
31 changes: 6 additions & 25 deletions command/src/proxy.rs
@@ -108,33 +108,18 @@ pub struct AggregatedMetricsData {
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WorkerMetrics {
/// key -> value
pub proxy: BTreeMap<String, FilteredData>,
pub proxy: Option<BTreeMap<String, FilteredData>>,
/// cluster_id -> cluster_metrics
pub clusters: BTreeMap<String, ClusterMetricsData>,
pub clusters: Option<BTreeMap<String, ClusterMetricsData>>,
}

/// the metrics of a given cluster, with several backends
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ClusterMetricsData {
/// metric name -> metric value
pub cluster: BTreeMap<String, FilteredData>,
pub cluster: Option<BTreeMap<String, FilteredData>>,
/// backend_id -> (metric name-> metric value)
pub backends: BTreeMap<String, BTreeMap<String, FilteredData>>,
}

impl ClusterMetricsData {
pub fn new() -> Self {
ClusterMetricsData {
cluster: BTreeMap::new(),
backends: BTreeMap::new(),
}
}
}

impl Default for ClusterMetricsData {
fn default() -> Self {
Self::new()
}
pub backends: Option<BTreeMap<String, BTreeMap<String, FilteredData>>>,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -826,7 +811,7 @@ pub enum QueryMetricsType {
},
Backend {
metrics: Vec<String>,
backends: Vec<(String, String)>, // (cluster_id, backend_id)
backend_ids: Vec<String>,
date: Option<i64>,
},
All, // dump proxy and cluster metrics
@@ -865,11 +850,7 @@ pub enum QueryAnswerCertificate {
pub enum QueryAnswerMetrics {
/// (list of proxy metrics, list of cluster metrics)
List((Vec<String>, Vec<String>)),
/// cluster_id -> (key -> metric)
Cluster(BTreeMap<String, BTreeMap<String, FilteredData>>),
/// cluster_id -> (backend_id -> (key -> metric))
Backend(BTreeMap<String, BTreeMap<String, BTreeMap<String, FilteredData>>>),
/// all worker metrics, proxy & clusters
/// all worker metrics, proxy & clusters, with Options all around for partial answers
All(WorkerMetrics),
}

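Since the Cluster and Backend variants are gone, every metrics payload now arrives through the single All(WorkerMetrics) shape, and consumers only check which Options are filled. A hedged sketch of such a match (type paths assumed, error handling elided):

use sozu_command_lib::proxy::{QueryAnswer, QueryAnswerMetrics, WorkerMetrics}; // assumed paths

fn handle_metrics_answer(answer: &QueryAnswer) {
    match answer {
        QueryAnswer::Metrics(QueryAnswerMetrics::List((proxy_names, cluster_names))) => {
            println!("available proxy metrics: {:?}", proxy_names);
            println!("available cluster metrics: {:?}", cluster_names);
        }
        QueryAnswer::Metrics(QueryAnswerMetrics::All(WorkerMetrics { proxy, clusters })) => {
            // both fields are Options: a partial answer simply leaves one of them as None
            println!("proxy metrics: {:?}", proxy);
            println!("cluster metrics: {:?}", clusters);
        }
        _ => println!("not a metrics answer"),
    }
}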
