forked from openshift/origin
-
Notifications
You must be signed in to change notification settings - Fork 1
/
deploymentconfigs.go
132 lines (118 loc) · 5.45 KB
/
deploymentconfigs.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
package aggregated_logging
import (
"fmt"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/selection"
"k8s.io/apimachinery/pkg/util/sets"
kapi "k8s.io/kubernetes/pkg/api"
deployapi "github.com/openshift/origin/pkg/apps/apis/apps"
)
// Label values identifying each logging component's DeploymentConfig.
const (
componentNameEs = "es"
componentNameEsOps = "es-ops"
componentNameKibana = "kibana"
componentNameKibanaOps = "kibana-ops"
componentNameCurator = "curator"
componentNameCuratorOps = "curator-ops"
componentNameMux = "mux"
)
// loggingComponents are those 'managed' by rep controllers (e.g. fluentd is deployed with a DaemonSet)
// expectedLoggingComponents must each have a DeploymentConfig; a missing one is reported as an error.
var expectedLoggingComponents = sets.NewString(componentNameEs, componentNameKibana, componentNameCurator)
// optionalLoggingComponents may legitimately be absent; a missing one is reported at Info level only.
var optionalLoggingComponents = sets.NewString(componentNameEsOps, componentNameKibanaOps, componentNameCuratorOps, componentNameMux)
// loggingComponents is the full set of component label values searched for.
var loggingComponents = expectedLoggingComponents.Union(optionalLoggingComponents)
// deploymentConfigWarnOptionalMissing is the Info message emitted when an
// optional logging component has no DeploymentConfig. Verb: component name.
const deploymentConfigWarnOptionalMissing = `
Did not find a DeploymentConfig to support optional component '%s'. If you require
this component, please re-install or update logging and specify the appropriate
variable to enable it.
`
// deploymentConfigZeroPodsFound is the Error message emitted when no logging
// pods at all were found. Verb: project name.
const deploymentConfigZeroPodsFound = `
There were no Pods found that support logging. Try running
the following commands for additional information:
$ oc describe dc -n %[1]s
$ oc get events -n %[1]s
`
// deploymentConfigNoPodsFound is the Error message emitted when a specific
// DeploymentConfig has zero pods. Verbs: DC name, project name.
const deploymentConfigNoPodsFound = `
There were no Pods found for DeploymentConfig '%[1]s'. Try running
the following commands for additional information:
$ oc describe dc %[1]s -n %[2]s
$ oc get events -n %[2]s
`
// deploymentConfigPodsNotRunning is the Error message emitted when a pod
// matched by a DC is not Running. Verbs: pod, DC, expected phase, actual
// phase, project.
const deploymentConfigPodsNotRunning = `
The Pod '%[1]s' matched by DeploymentConfig '%[2]s' is not in '%[3]s' status: %[4]s.
Depending upon the state, this could mean there is an error running the image
for one or more pod containers, the node could be pulling images, etc. Try running
the following commands for additional information:
$ oc describe pod %[1]s -n %[5]s
$ oc logs %[1]s -n %[5]s
$ oc get events -n %[5]s
`
// checkDeploymentConfigs diagnoses the logging DeploymentConfigs in the given
// project: it lists DCs matching the logging component/provider labels,
// verifies one exists for every expected component (missing optional ones are
// reported at Info level, required ones as errors), then delegates pod-level
// checks to checkDeploymentConfigPods. All findings go through r.
func checkDeploymentConfigs(r diagnosticReporter, adapter deploymentConfigAdapter, project string) {
	// Requirement inputs are package constants, so these errors cannot occur.
	componentReq, _ := labels.NewRequirement(componentKey, selection.In, loggingComponents.List())
	providerReq, _ := labels.NewRequirement(providerKey, selection.Equals, []string{openshiftValue})
	dcSelector := labels.NewSelector().Add(*componentReq, *providerReq)
	r.Debug("AGL0040", fmt.Sprintf("Checking for DeploymentConfigs in project '%s' with selector '%s'", project, dcSelector))
	configs, err := adapter.deploymentconfigs(project, metav1.ListOptions{LabelSelector: dcSelector.String()})
	if err != nil {
		r.Error("AGL0045", err, fmt.Sprintf("There was an error while trying to retrieve the DeploymentConfigs in project '%s': %s", project, err))
		return
	}
	if len(configs.Items) == 0 {
		r.Error("AGL0047", nil, fmt.Sprintf("Did not find any matching DeploymentConfigs in project '%s' which means no logging components were deployed. Try running the installer.", project))
		return
	}
	// Record which logging components actually have a DeploymentConfig.
	seen := sets.NewString()
	for _, dc := range configs.Items {
		component := labels.Set(dc.ObjectMeta.Labels).Get(componentKey)
		seen.Insert(component)
		r.Debug("AGL0050", fmt.Sprintf("Found DeploymentConfig '%s' for component '%s'", dc.ObjectMeta.Name, component))
	}
	// Report any component without a DC; only required ones are errors.
	for _, component := range loggingComponents.List() {
		if seen.Has(component) {
			continue
		}
		if optionalLoggingComponents.Has(component) {
			r.Info("AGL0060", fmt.Sprintf(deploymentConfigWarnOptionalMissing, component))
		} else {
			r.Error("AGL0065", nil, fmt.Sprintf("Did not find a DeploymentConfig to support component '%s'", component))
		}
	}
	checkDeploymentConfigPods(r, adapter, *configs, project)
}
// checkDeploymentConfigPods verifies that the logging pods in the project are
// healthy: it lists pods with the logging component/provider labels, warns on
// pods that don't reference a DC, errors on pods that aren't Running, and
// errors on any DeploymentConfig in dcs with no backing pod at all. All
// findings are reported through r; the function itself returns nothing.
func checkDeploymentConfigPods(r diagnosticReporter, adapter deploymentConfigAdapter, dcs deployapi.DeploymentConfigList, project string) {
	// Requirement inputs are package constants, so these errors cannot occur.
	compReq, _ := labels.NewRequirement(componentKey, selection.In, loggingComponents.List())
	provReq, _ := labels.NewRequirement(providerKey, selection.Equals, []string{openshiftValue})
	podSelector := labels.NewSelector().Add(*compReq, *provReq)
	r.Debug("AGL0070", fmt.Sprintf("Getting pods that match selector '%s'", podSelector))
	podList, err := adapter.pods(project, metav1.ListOptions{LabelSelector: podSelector.String()})
	if err != nil {
		r.Error("AGL0075", err, fmt.Sprintf("There was an error while trying to retrieve the pods for the AggregatedLogging stack: %s", err))
		return
	}
	if len(podList.Items) == 0 {
		r.Error("AGL0080", nil, fmt.Sprintf(deploymentConfigZeroPodsFound, project))
		return
	}
	// Seed every known DC with zero so DCs with no pods are detected below.
	dcPodCount := make(map[string]int, len(dcs.Items))
	for _, dc := range dcs.Items {
		dcPodCount[dc.ObjectMeta.Name] = 0
	}
	for _, pod := range podList.Items {
		r.Debug("AGL0082", fmt.Sprintf("Checking status of Pod '%s'...", pod.ObjectMeta.Name))
		dcName, hasDcName := pod.ObjectMeta.Annotations[deployapi.DeploymentConfigAnnotation]
		if !hasDcName {
			// Fixed message: original read "that that ... see if its running".
			r.Warn("AGL0085", nil, fmt.Sprintf("Found Pod '%s' that does not reference a logging deployment config which may be acceptable. Skipping check to see if it's running.", pod.ObjectMeta.Name))
			continue
		}
		if pod.Status.Phase != kapi.PodRunning {
			podName := pod.ObjectMeta.Name
			r.Error("AGL0090", nil, fmt.Sprintf(deploymentConfigPodsNotRunning, podName, dcName, kapi.PodRunning, pod.Status.Phase, project))
		}
		// Non-running pods still count toward their DC: the DC has a pod, it
		// just isn't healthy. The int zero value makes a comma-ok lookup
		// unnecessary here (staticcheck S1005).
		dcPodCount[dcName]++
	}
	for name, count := range dcPodCount {
		if count == 0 {
			r.Error("AGL0095", nil, fmt.Sprintf(deploymentConfigNoPodsFound, name, project))
		}
	}
}