cmd/metrics/loader_component.go — 50 changes: 20 additions & 30 deletions
@@ -6,6 +6,7 @@ package metrics
import (
"encoding/json"
"fmt"
"io/fs"
"log/slog"
"os"
"path/filepath"
@@ -26,10 +27,6 @@ func (l *ComponentLoader) Load(loaderConfig LoaderConfig) ([]MetricDefinition, [
if err != nil {
return nil, nil, fmt.Errorf("failed to load event definitions: %w", err)
}
metricDefinitions, err = l.filterUncollectableMetrics(metricDefinitions, eventDefinitions, loaderConfig.Metadata)
if err != nil {
return nil, nil, fmt.Errorf("failed to filter uncollectable metrics: %w", err)
}
groupDefinitions, err := l.formEventGroups(metricDefinitions, eventDefinitions, loaderConfig.Metadata)
if err != nil {
return nil, nil, fmt.Errorf("failed to form event groups: %w", err)
@@ -180,6 +177,10 @@ func (l *ComponentLoader) loadEventDefinitions(metadata Metadata) (events []Comp
if err != nil {
return
}
// sort for deterministic processing order
slices.SortFunc(dirEntries, func(a, b fs.DirEntry) int {
return strings.Compare(a.Name(), b.Name())
})
for _, entry := range dirEntries {
if entry.IsDir() || entry.Name() == "metrics.json" {
continue
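// Aside, not part of the diff: a minimal standalone sketch of the same
// sort-by-name pattern. (*os.File).ReadDir returns entries in directory
// order (unlike os.ReadDir, which sorts), so an explicit sort makes the
// processing order reproducible no matter how the entries were read.
// The path "events" below is only an illustration.
package main

import (
	"fmt"
	"io/fs"
	"os"
	"slices"
	"strings"
)

func readDirSorted(path string) ([]fs.DirEntry, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	entries, err := f.ReadDir(-1) // order here is filesystem-dependent
	if err != nil {
		return nil, err
	}
	slices.SortFunc(entries, func(a, b fs.DirEntry) int {
		return strings.Compare(a.Name(), b.Name())
	})
	return entries, nil
}

func main() {
	entries, err := readDirSorted("events")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, entry := range entries {
		fmt.Println(entry.Name())
	}
}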
@@ -198,30 +199,6 @@
return events, nil
}

func (l *ComponentLoader) filterUncollectableMetrics(metrics []MetricDefinition, events []ComponentEvent, metadata Metadata) (filteredMetrics []MetricDefinition, err error) {
uncollectableEvents, err := l.identifyUncollectableEvents(events, metadata)
if err != nil {
return
}
for _, metric := range metrics {
for variable := range metric.Variables {
if slices.Contains(uncollectableEvents, variable) {
slog.Info("Excluding metric due to uncollectable event", slog.String("metric", metric.Name), slog.String("event", variable))
goto nextMetric
}
}
filteredMetrics = append(filteredMetrics, metric)
nextMetric:
}
return filteredMetrics, nil
}

func (l *ComponentLoader) identifyUncollectableEvents(events []ComponentEvent, metadata Metadata) (uncollectableEvents []string, err error) {
// TODO:
// For now, assume all events are collectable
return uncollectableEvents, nil
}

func (l *ComponentLoader) formEventGroups(metrics []MetricDefinition, events []ComponentEvent, metadata Metadata) (groups []GroupDefinition, err error) {
numGPCounters := metadata.NumGeneralPurposeCounters // groups can have at most this many events (plus fixed counters)
eventNames := make(map[string]bool)
@@ -242,7 +219,14 @@ func (l *ComponentLoader) formEventGroups(metrics []MetricDefinition, events []C
var currentGroup GroupDefinition
var currentGPCount int // Track the current number of GP counters used

// Get variable names and sort them for deterministic order
var variables []string
for variable := range metric.Variables {
variables = append(variables, variable)
}
slices.Sort(variables)

for _, variable := range variables {
// confirm variable is a valid event
if _, exists := eventNames[variable]; !exists {
slog.Warn("Metric variable does not correspond to a known event, skipping variable", slog.String("metric", metric.Name), slog.String("variable", variable))
@@ -290,12 +274,18 @@ func mergeSmallGroups(groups []GroupDefinition, numGPCounters int) []GroupDefini
}

// Sort groups by size for efficient merging (smallest first)
// Important that this is a deterministic sort
slices.SortFunc(groups, func(a, b GroupDefinition) int {
if len(a) == 0 || len(b) == 0 {
panic("empty group encountered during sorting in mergeSmallGroups")
}
aGPCount := countGPEvents(a)
bGPCount := countGPEvents(b)
return aGPCount - bGPCount
if aGPCount != bGPCount {
return aGPCount - bGPCount
}
return 1 // arbitrary but consistent
})

var mergedGroups []GroupDefinition
processed := make([]bool, len(groups))

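// Aside, not part of the diff: slices.SortFunc expects cmp to describe a
// consistent ordering, and slices.SortStableFunc keeps equal elements in
// their original order. A sketch of the same smallest-first idea, assuming
// the input order is already deterministic; "group" is a stand-in type, not
// the real GroupDefinition.
package main

import (
	"fmt"
	"slices"
)

type group []string

func main() {
	groups := []group{
		{"cpu_cycles", "instructions"},
		{"llc_misses"},
		{"branches", "branch_misses"},
	}
	// smallest group first; ties keep their original relative order
	slices.SortStableFunc(groups, func(a, b group) int {
		return len(a) - len(b)
	})
	fmt.Println(groups) // [[llc_misses] [cpu_cycles instructions] [branches branch_misses]]
}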