/
operations.go
218 lines (189 loc) · 8.55 KB
/
operations.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
package ops
import (
"errors"
"fmt"
"strconv"
"strings"
"time"
"github.com/jinzhu/copier"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/graph/model"
"github.com/litmuschaos/litmus/litmus-portal/graphql-server/utils"
"github.com/patrickmn/go-cache"
log "github.com/sirupsen/logrus"
)
// CreateDateMap increments the workflow-stats counter in statsMap for the
// time bucket (month, day-of-month, or hour) that updatedAt falls into,
// according to the given time-frequency filter.
//
// updatedAt is a unix timestamp in seconds encoded as a decimal string.
// An error is returned when the timestamp cannot be parsed or when the
// filter does not match any supported frequency.
func CreateDateMap(updatedAt string, filter model.TimeFrequency, statsMap map[string]model.WorkflowStatsResponse) error {
	// The timestamp arrives as a string; convert it to an int64 unix time.
	seconds, err := strconv.ParseInt(updatedAt, 10, 64)
	if err != nil {
		return err
	}
	// Converts unix time to time.Time
	lastUpdatedTime := time.Unix(seconds, 0)

	// Derive the bucket key for the requested granularity; the increment
	// itself is identical for every frequency, so it is done once below.
	var key string
	switch filter {
	case model.TimeFrequencyMonthly:
		// e.g. "January"
		key = lastUpdatedTime.Month().String()
	case model.TimeFrequencyDaily:
		// e.g. "1-31" (month-day)
		key = fmt.Sprintf("%d-%d", lastUpdatedTime.Month(), lastUpdatedTime.Day())
	case model.TimeFrequencyHourly:
		// e.g. "31-23" (day-hour)
		key = fmt.Sprintf("%d-%d", lastUpdatedTime.Day(), lastUpdatedTime.Hour())
	default:
		return errors.New("no matching filter found")
	}

	// Map values are structs (not pointers), so read-modify-write the entry.
	bucket := statsMap[key]
	bucket.Value++
	statsMap[key] = bucket
	return nil
}
// PatchChaosEventWithVerdict takes annotations with chaos events, chaos verdict prometheus response, prometheus queries and cache object to patch and update chaos events with chaos verdict
func PatchChaosEventWithVerdict(annotations []*model.AnnotationsPromResponse, verdictResponse *model.AnnotationsPromResponse, promInput *model.PrometheusDataRequest, AnalyticsCache *cache.Cache) []*model.AnnotationsPromResponse {
	// Deep-copy the incoming annotations so they can be iterated while
	// `annotations` itself is mutated in place below.
	var existingAnnotations []*model.AnnotationsPromResponse
	err := copier.Copy(&existingAnnotations, &annotations)
	if err != nil {
		log.Errorf("error parsing existing annotations %v\n", err)
	}
	for annotationIndex, annotation := range existingAnnotations {
		// Work on a per-annotation copy; copy failures are logged but not fatal.
		var existingAnnotation model.AnnotationsPromResponse
		err := copier.Copy(&existingAnnotation, &annotation)
		if err != nil {
			log.Errorf("error parsing existing annotation %v\n", err)
		}
		// Only annotations whose query ID marks them as chaos events get patched.
		if strings.Contains(existingAnnotation.QueryID, "chaos-event") {
			// Copy of the verdict response that can be read per event legend.
			var newAnnotation model.AnnotationsPromResponse
			err := copier.Copy(&newAnnotation, &verdictResponse)
			if err != nil {
				log.Errorf("error parsing new annotation %v\n", err)
			}
			// Maps an event-legend index that duplicates an earlier legend to a
			// removal offset used during the compaction pass at the end.
			duplicateEventIndices := make(map[int]int)
			var duplicateEventOffset = 0
			for verdictLegendIndex, verdictLegend := range newAnnotation.Legends {
				verdictLegendName := func(str *string) string { return *str }(verdictLegend)
				var (
					eventFound             = false
					duplicateEventsFound   = false
					firstEventFoundAtIndex = 0
				)
				// Match the verdict legend against every existing event legend.
				for eventLegendIndex, eventLegend := range existingAnnotation.Legends {
					eventLegendName := func(str *string) string { return *str }(eventLegend)
					if verdictLegendName == eventLegendName {
						if !eventFound {
							// Remember where this legend first appeared so later
							// duplicates can be merged into it.
							firstEventFoundAtIndex = eventLegendIndex
						} else {
							// A repeated legend: record its index (once) for removal.
							duplicateEventsFound = true
							if _, ok := duplicateEventIndices[eventLegendIndex]; !ok {
								duplicateEventIndices[eventLegendIndex] = duplicateEventOffset
								duplicateEventOffset++
							}
						}
						eventFound = true
						// Merge verdict sub-data into the matching event's sub-data:
						// update values for entries with the same name+date, and
						// collect genuinely new entries for appending.
						var newVerdictSubData []*model.SubData
						for _, verdictSubData := range verdictResponse.SubDataArray[verdictLegendIndex] {
							verdictSubDataDate := func(date *float64) float64 { return *date }(verdictSubData.Date)
							var subDataFound = false
							for eventSubDataIndex, eventSubData := range annotation.SubDataArray[eventLegendIndex] {
								if eventSubData != nil {
									eventSubDataDate := func(date *float64) float64 { return *date }(eventSubData.Date)
									if eventSubData.SubDataName == verdictSubData.SubDataName && eventSubDataDate == verdictSubDataDate {
										subDataFound = true
										// Patch the original annotation in place.
										annotations[annotationIndex].SubDataArray[eventLegendIndex][eventSubDataIndex].Value = verdictSubData.Value
									}
								}
							}
							// Only sub-data with a positive date is treated as valid new data.
							if !subDataFound && verdictSubDataDate > 0 {
								newVerdictSubData = append(newVerdictSubData, verdictSubData)
							}
						}
						annotations[annotationIndex].SubDataArray[eventLegendIndex] = append(annotations[annotationIndex].SubDataArray[eventLegendIndex], newVerdictSubData...)
						if duplicateEventsFound {
							// Fold the duplicate legend's time-series into the first
							// occurrence, skipping series whose first date is already present.
							existingDates := make(map[float64]bool)
							for _, tsv := range annotations[annotationIndex].Tsvs[firstEventFoundAtIndex] {
								existingDates[func(date *float64) float64 { return *date }(tsv.Date)] = true
							}
							if _, ok := existingDates[func(date *float64) float64 { return *date }(annotations[annotationIndex].Tsvs[eventLegendIndex][0].Date)]; !ok {
								annotations[annotationIndex].Tsvs[firstEventFoundAtIndex] = append(annotations[annotationIndex].Tsvs[firstEventFoundAtIndex], annotations[annotationIndex].Tsvs[eventLegendIndex]...)
							}
							// The duplicate's (already merged) sub-data replaces the first occurrence's.
							annotations[annotationIndex].SubDataArray[firstEventFoundAtIndex] = annotations[annotationIndex].SubDataArray[eventLegendIndex]
						}
					}
				}
				if !eventFound {
					// The verdict legend has no matching event yet: append it as a
					// new event, but only if at least one time-series value equals 1
					// (i.e. the verdict actually fired).
					verdictValid := false
					for _, tsv := range verdictResponse.Tsvs[verdictLegendIndex] {
						if !verdictValid && func(val *int) int { return *val }(tsv.Value) == 1 {
							verdictValid = true
						}
					}
					if verdictValid {
						annotations[annotationIndex].Legends = append(annotations[annotationIndex].Legends, verdictLegend)
						annotations[annotationIndex].SubDataArray = append(annotations[annotationIndex].SubDataArray, verdictResponse.SubDataArray[verdictLegendIndex])
						// NOTE(review): the new legend gets a nil Tsvs entry rather than
						// verdictResponse.Tsvs[verdictLegendIndex] — presumably intentional
						// (verdict series are not charted as event series); confirm upstream.
						annotations[annotationIndex].Tsvs = append(annotations[annotationIndex].Tsvs, nil)
					}
				}
			}
			if duplicateEventOffset != 0 {
				// Compaction pass: remove every recorded duplicate legend. Each
				// stored offset counts how many earlier duplicates were already
				// removed, so i-offset is the element's current position.
				numberOfEvents := len(annotations[annotationIndex].Legends)
				for i := 0; i < numberOfEvents; i++ {
					if offset, ok := duplicateEventIndices[i]; ok && i-offset >= 0 {
						annotations[annotationIndex].Legends = append(annotations[annotationIndex].Legends[:i-offset], annotations[annotationIndex].Legends[i-offset+1:]...)
						annotations[annotationIndex].Tsvs = append(annotations[annotationIndex].Tsvs[:i-offset], annotations[annotationIndex].Tsvs[i-offset+1:]...)
						annotations[annotationIndex].SubDataArray = append(annotations[annotationIndex].SubDataArray[:i-offset], annotations[annotationIndex].SubDataArray[i-offset+1:]...)
					}
				}
			}
			// Cache the patched annotation keyed by query + time range + datasource;
			// fall back to an update when the key already exists.
			eventCacheKey := annotation.QueryID + "-" + promInput.DsDetails.Start + "-" + promInput.DsDetails.End + "-" + promInput.DsDetails.URL
			cacheError := utils.AddCache(AnalyticsCache, eventCacheKey, annotations[annotationIndex])
			if cacheError != nil {
				errorStr := fmt.Sprintf("%v", cacheError)
				if strings.Contains(errorStr, "already exists") {
					cacheError = utils.UpdateCache(AnalyticsCache, eventCacheKey, annotations[annotationIndex])
					if cacheError != nil {
						log.Errorf("error while caching: %v\n", cacheError)
					}
				}
			}
		}
	}
	return annotations
}
// MapMetricsToDashboard assembles the dashboard response: it attaches the
// query results from queryResponseMap to their panels and panel groups as
// described by dashboardQueryMap, and carries the annotations over from
// newPromResponse.
func MapMetricsToDashboard(dashboardQueryMap []*model.QueryMapForPanelGroup, newPromResponse *model.PrometheusDataResponse, queryResponseMap map[string]*model.MetricsPromResponse) *model.DashboardPromResponse {
	var groupMetrics []*model.MetricDataForPanelGroup
	for _, groupMap := range dashboardQueryMap {
		var panelMetrics []*model.MetricDataForPanel
		for _, panelMap := range groupMap.PanelQueryMap {
			// Resolve each query ID of the panel to its cached response.
			var responses []*model.MetricsPromResponse
			for _, id := range panelMap.QueryIDs {
				responses = append(responses, queryResponseMap[id])
			}
			panelMetrics = append(panelMetrics, &model.MetricDataForPanel{
				PanelID:              panelMap.PanelID,
				PanelMetricsResponse: responses,
			})
		}
		groupMetrics = append(groupMetrics, &model.MetricDataForPanelGroup{
			PanelGroupID:              groupMap.PanelGroupID,
			PanelGroupMetricsResponse: panelMetrics,
		})
	}

	// Copy the prometheus response so its annotations can be re-used in the
	// dashboard response; a copy failure is logged but not fatal.
	var copied model.PrometheusDataResponse
	if err := copier.Copy(&copied, &newPromResponse); err != nil {
		log.Errorf("error parsing annotations %v\n", err)
	}

	return &model.DashboardPromResponse{
		DashboardMetricsResponse: groupMetrics,
		AnnotationsResponse:      copied.AnnotationsResponse,
	}
}