Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cloud Monitoring: Reduce request size when listing labels #44365

Merged
merged 6 commits into from Jan 26, 2022
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
54 changes: 18 additions & 36 deletions pkg/tsdb/cloudmonitoring/time_series_filter.go
Expand Up @@ -85,16 +85,13 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
//nolint: gocyclo
func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
labels := make(map[string]map[string]bool)
frames := data.Frames{}

customFrameMeta := map[string]interface{}{}
customFrameMeta["alignmentPeriod"] = timeSeriesFilter.Params.Get("aggregation.alignmentPeriod")
customFrameMeta["perSeriesAligner"] = timeSeriesFilter.Params.Get("aggregation.perSeriesAligner")
for _, series := range response.TimeSeries {
seriesLabels := data.Labels{}
defaultMetricName := series.Metric.Type
labels["resource.type"] = map[string]bool{series.Resource.Type: true}
labels := make(map[string]string)
labels["resource.type"] = series.Resource.Type
seriesLabels["resource.type"] = series.Resource.Type

frame := data.NewFrameOfFieldTypes("", len(series.Points), data.FieldTypeTime, data.FieldTypeFloat64)
Expand All @@ -104,10 +101,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
}

for key, value := range series.Metric.Labels {
if _, ok := labels["metric.label."+key]; !ok {
labels["metric.label."+key] = map[string]bool{}
}
labels["metric.label."+key][value] = true
labels["metric.label."+key] = value
seriesLabels["metric.label."+key] = value

if len(timeSeriesFilter.GroupBys) == 0 || containsLabel(timeSeriesFilter.GroupBys, "metric.label."+key) {
Expand All @@ -116,10 +110,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
}

for key, value := range series.Resource.Labels {
if _, ok := labels["resource.label."+key]; !ok {
labels["resource.label."+key] = map[string]bool{}
}
labels["resource.label."+key][value] = true
labels["resource.label."+key] = value
seriesLabels["resource.label."+key] = value

if containsLabel(timeSeriesFilter.GroupBys, "resource.label."+key) {
Expand All @@ -130,22 +121,19 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
for labelType, labelTypeValues := range series.MetaData {
for labelKey, labelValue := range labelTypeValues {
key := toSnakeCase(fmt.Sprintf("metadata.%s.%s", labelType, labelKey))
if _, ok := labels[key]; !ok {
labels[key] = map[string]bool{}
}

switch v := labelValue.(type) {
case string:
labels[key][v] = true
labels[key] = v
seriesLabels[key] = v
case bool:
strVal := strconv.FormatBool(v)
labels[key][strVal] = true
labels[key] = strVal
seriesLabels[key] = strVal
case []interface{}:
for _, v := range v {
strVal := v.(string)
labels[key][strVal] = true
labels[key] = strVal
if len(seriesLabels[key]) > 0 {
strVal = fmt.Sprintf("%s, %s", seriesLabels[key], strVal)
}
Expand All @@ -155,6 +143,17 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
}
}

customFrameMeta := map[string]interface{}{}
customFrameMeta["alignmentPeriod"] = timeSeriesFilter.Params.Get("aggregation.alignmentPeriod")
customFrameMeta["perSeriesAligner"] = timeSeriesFilter.Params.Get("aggregation.perSeriesAligner")
customFrameMeta["labels"] = labels
customFrameMeta["groupBys"] = timeSeriesFilter.GroupBys
if frame.Meta != nil {
frame.Meta.Custom = customFrameMeta
} else {
frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
}

// reverse the order to be ascending
if series.ValueType != "DISTRIBUTION" {
timeSeriesFilter.handleNonDistributionSeries(series, defaultMetricName, seriesLabels, frame)
Expand Down Expand Up @@ -234,23 +233,6 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
frames = addConfigData(frames, dl, response.Unit)
}

labelsByKey := make(map[string][]string)
for key, values := range labels {
for value := range values {
labelsByKey[key] = append(labelsByKey[key], value)
}
}
customFrameMeta["labels"] = labelsByKey
customFrameMeta["groupBys"] = timeSeriesFilter.GroupBys

for _, frame := range frames {
if frame.Meta != nil {
frame.Meta.Custom = customFrameMeta
} else {
frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
}
}

queryRes.Frames = frames

return nil
Expand Down
16 changes: 16 additions & 0 deletions pkg/tsdb/cloudmonitoring/time_series_filter_test.go
Expand Up @@ -370,6 +370,22 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
})
})

t.Run("Parse labels", func(t *testing.T) {
data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
custom, ok := frames[0].Meta.Custom.(map[string]interface{})
require.True(t, ok)
labels, ok := custom["labels"].(map[string]string)
require.True(t, ok)
assert.Equal(t, "114250375703598695", labels["resource.label.instance_id"])
})
}

func loadTestFile(path string) (cloudMonitoringResponse, error) {
Expand Down
37 changes: 11 additions & 26 deletions pkg/tsdb/cloudmonitoring/time_series_query.go
Expand Up @@ -82,36 +82,32 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, r

func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
labels := make(map[string]map[string]bool)
frames := data.Frames{}

customFrameMeta := map[string]interface{}{}
for _, series := range response.TimeSeriesData {
seriesLabels := make(map[string]string)
frame := data.NewFrameOfFieldTypes("", len(series.PointData), data.FieldTypeTime, data.FieldTypeFloat64)
frame.RefID = timeSeriesQuery.RefID
frame.Meta = &data.FrameMeta{
ExecutedQueryString: executedQueryString,
}
labels := make(map[string]string)

for n, d := range response.TimeSeriesDescriptor.LabelDescriptors {
key := toSnakeCase(d.Key)
key = strings.Replace(key, ".", ".label.", 1)
if _, ok := labels[key]; !ok {
labels[key] = map[string]bool{}
}

labelValue := series.LabelValues[n]
switch d.ValueType {
case "BOOL":
strVal := strconv.FormatBool(labelValue.BoolValue)
labels[key][strVal] = true
labels[key] = strVal
seriesLabels[key] = strVal
case "INT64":
labels[key][labelValue.Int64Value] = true
labels[key] = labelValue.Int64Value
seriesLabels[key] = labelValue.Int64Value
default:
labels[key][labelValue.StringValue] = true
labels[key] = labelValue.StringValue
seriesLabels[key] = labelValue.StringValue
}
}
Expand All @@ -125,10 +121,7 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
continue
}

if _, ok := labels["metric.name"]; !ok {
labels["metric.name"] = map[string]bool{}
}
labels["metric.name"][d.Key] = true
labels["metric.name"] = d.Key
seriesLabels["metric.name"] = d.Key
defaultMetricName := d.Key

Expand Down Expand Up @@ -239,27 +232,19 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
frames = append(frames, buckets[i])
}
}
}
if len(response.TimeSeriesData) > 0 {
dl := timeSeriesQuery.buildDeepLink()
frames = addConfigData(frames, dl, response.Unit)
}

labelsByKey := make(map[string][]string)
for key, values := range labels {
for value := range values {
labelsByKey[key] = append(labelsByKey[key], value)
}
}
customFrameMeta["labels"] = labelsByKey

for _, frame := range frames {
customFrameMeta := map[string]interface{}{}
customFrameMeta["labels"] = labels
if frame.Meta != nil {
frame.Meta.Custom = customFrameMeta
} else {
frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
}
}
if len(response.TimeSeriesData) > 0 {
dl := timeSeriesQuery.buildDeepLink()
frames = addConfigData(frames, dl, response.Unit)
}
andresmgot marked this conversation as resolved.
Show resolved Hide resolved

queryRes.Frames = frames

Expand Down
24 changes: 24 additions & 0 deletions pkg/tsdb/cloudmonitoring/time_series_query_test.go
Expand Up @@ -77,4 +77,28 @@ func TestTimeSeriesQuery(t *testing.T) {
assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
})
})

t.Run("Parse labels", func(t *testing.T) {
data, err := loadTestFile("./test-data/7-series-response-mql.json")
require.NoError(t, err)

fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
custom, ok := frames[0].Meta.Custom.(map[string]interface{})
require.True(t, ok)
labels, ok := custom["labels"].(map[string]string)
require.True(t, ok)
assert.Equal(t, "6724404429462225363", labels["resource.label.instance_id"])
})
}
19 changes: 18 additions & 1 deletion public/app/plugins/datasource/cloud-monitoring/datasource.ts
Expand Up @@ -179,7 +179,24 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
const dataQueryResponse = toDataQueryResponse({
data: data,
});
return dataQueryResponse?.data[0]?.meta?.custom?.labels ?? {};
const labels = dataQueryResponse?.data
.map((f) => f.meta?.custom?.labels)
.filter((p) => !!p)
.reduce((acc, labels) => {
for (let key in labels) {
if (!acc[key]) {
acc[key] = new Set<string>();
}
acc[key].add(labels[key]);
}
return acc;
}, {});
return Object.fromEntries(
Object.entries(labels).map((l: any) => {
l[1] = Array.from(l[1]);
return l;
})
);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

A slightly simpler solution:

Suggested change
const labels = dataQueryResponse?.data
.map((f) => f.meta?.custom?.labels)
.filter((p) => !!p)
.reduce((acc, labels) => {
for (let key in labels) {
if (!acc[key]) {
acc[key] = new Set<string>();
}
acc[key].add(labels[key]);
}
return acc;
}, {});
return Object.fromEntries(
Object.entries(labels).map((l: any) => {
l[1] = Array.from(l[1]);
return l;
})
);
const labels = dataQueryResponse?.data
.map((f) => f.meta?.custom?.labels)
.filter((p) => !!p)
.reduce((acc, labels) => {
for (let key in labels) {
if (!acc[key]) {
acc[key] = [];
}
if (labels[key] && !acc[key].includes(labels[key])) {
acc[key].push(labels[key]);
}
}
return acc;
}, {});
return labels;

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(I also noticed that some labels were empty, that code removes those)

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for the advice!
Yes, using includes() is simpler.
However, I don’t want to call an O(n) function inside a loop.
In some environments, the number of labels can be very large. (I may be worrying too much.)
Using a Set here is intentional.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In our environment, Cloud Monitoring records more than 1000 pod_name labels.
Screenshot 2022-01-26 091158

I think Set.has() is faster than Array.includes() when the list is long.
https://www.measurethat.net/Benchmarks/Show/16868/1/includes-vs-has-20220126

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am not sure whether the performance benefit is negated by the later conversion loop, but either way it's fine.

})
)
);
Expand Down