diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/chainsaw-test.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/chainsaw-test.yaml
new file mode 100644
index 000000000..6e5bc9b7f
--- /dev/null
+++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/chainsaw-test.yaml
@@ -0,0 +1,57 @@
+apiVersion: chainsaw.kyverno.io/v1alpha1
+kind: Test
+metadata:
+  name: otel-tempo-serverless
+spec:
+  # This test must run serially because it requires large resources and uses the static knative-serving and knative-eventing namespaces
+  concurrent: false
+  namespace: chainsaw-otel-tempo-serverless
+  steps:
+  - name: Install Minio object store
+    try:
+    - apply:
+        file: install-minio.yaml
+    - assert:
+        file: install-minio-assert.yaml
+  - name: Install TempoStack
+    try:
+    - apply:
+        file: install-tempo.yaml
+    - assert:
+        file: install-tempo-assert.yaml
+  - name: Create the OTEL collector instance
+    try:
+    - apply:
+        file: create-otel-collector.yaml
+    - assert:
+        file: create-otel-collector-assert.yaml
+  - name: Create Knative serving instance
+    try:
+    - apply:
+        file: create-knative-serving.yaml
+    - assert:
+        file: create-knative-serving-assert.yaml
+  - name: Create Knative eventing instance
+    try:
+    - apply:
+        file: create-knative-eventing.yaml
+    - assert:
+        file: create-knative-eventing-assert.yaml
+  - name: Create Knative service app instance
+    try:
+    - apply:
+        file: create-knative-app.yaml
+    - assert:
+        file: create-knative-app-assert.yaml
+  - name: Generate traces by probing the Knative service app instance
+    try:
+    - apply:
+        file: generate-traces.yaml
+    - assert:
+        file: generate-traces-assert.yaml
+  - name: Verify the traces in the Tempo instance
+    try:
+    - apply:
+        file: verify-traces.yaml
+    - assert:
+        file: verify-traces-assert.yaml
diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app-assert.yaml
new file mode 100644
index 000000000..82737216f
--- /dev/null
+++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app-assert.yaml
@@ -0,0 +1,77 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  labels:
+    app: helloworld-go
+    serving.knative.dev/configuration: serverless-app
+    serving.knative.dev/service: serverless-app
+  namespace: chainsaw-otel-tempo-serverless
+status:
+  availableReplicas: 1
+  readyReplicas: 1
+  replicas: 1
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: serverless-app
+  namespace: chainsaw-otel-tempo-serverless
+spec:
+  externalName: kourier-internal.knative-serving-ingress.svc.cluster.local
+  ports:
+  - name: http2
+    port: 80
+    protocol: TCP
+    targetPort: 80
+  type: ExternalName
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: serverless-app-00001
+  namespace: chainsaw-otel-tempo-serverless
+spec:
+  ports:
+  - name: http
+    port: 80
+    protocol: TCP
+    targetPort: 8012
+  - name: https
+    port: 443
+    protocol: TCP
+    targetPort: 8112
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: serverless-app-00001-private
+  namespace: chainsaw-otel-tempo-serverless
+spec:
+  ports:
+  - name: http
+    port: 80
+    protocol: TCP
+    targetPort: 8012
+  - name: https
+    port: 443
+    protocol: TCP
+    targetPort: 8112
+  - name: http-autometric
+    port: 9090
+    protocol: TCP
+    targetPort: http-autometric
+  - name: http-usermetric
+    port: 9091
+    protocol: TCP
+    targetPort: http-usermetric
+  - name: http-queueadm
+    port: 8022
+    protocol: TCP
+    targetPort: 8022
+  - name: http-istio
+    port: 8012
+    protocol: TCP
+    targetPort: 8012
diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app.yaml new file mode 100644 index 000000000..c1a525cf0 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-app.yaml @@ -0,0 +1,23 @@ +apiVersion: serving.knative.dev/v1 +kind: Service +metadata: + name: serverless-app + namespace: chainsaw-otel-tempo-serverless +spec: + template: + metadata: + labels: + app: helloworld-go + annotations: + autoscaling.knative.dev/minScale: "1" + autoscaling.knative.dev/target: "1" + spec: + containers: + - image: quay.io/openshift-knative/helloworld:v1.2 + imagePullPolicy: Always + resources: + requests: + cpu: "200m" + env: + - name: TARGET + value: "Go Sample v1" diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing-assert.yaml new file mode 100644 index 000000000..5143de4ee --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing-assert.yaml @@ -0,0 +1,94 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: eventing-controller + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: eventing-webhook + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: imc-controller + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: imc-dispatcher + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mt-broker-controller + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mt-broker-filter + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mt-broker-ingress + namespace: knative-eventing +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: v1 +kind: Pod +metadata: + labels: + app: eventing-controller + app.kubernetes.io/component: eventing-controller + app.kubernetes.io/name: knative-eventing + namespace: knative-eventing +status: + containerStatuses: + - name: eventing-controller + ready: true + started: true + - name: kube-rbac-proxy + ready: true + started: true + phase: Running \ No newline at end of file diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing.yaml new file mode 100644 index 000000000..36de20764 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-eventing.yaml @@ -0,0 +1,12 @@ +apiVersion: operator.knative.dev/v1beta1 +kind: KnativeEventing +metadata: + name: serverless + namespace: knative-eventing +spec: + config: + tracing: + backend: "zipkin" + zipkin-endpoint: "http://serverless-collector-headless.chainsaw-otel-tempo-serverless.svc:9411/api/v2/spans" + debug: "false" + sample-rate: "0.1" diff --git 
a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving-assert.yaml new file mode 100644 index 000000000..989ece475 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving-assert.yaml @@ -0,0 +1,72 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: activator + namespace: knative-serving +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: autoscaler + namespace: knative-serving +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: autoscaler-hpa + namespace: knative-serving +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: controller + namespace: knative-serving +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: webhook + namespace: knative-serving +status: + availableReplicas: 2 + readyReplicas: 2 + replicas: 2 + +--- +apiVersion: operator.knative.dev/v1beta1 +kind: KnativeServing +metadata: + name: serverless + namespace: knative-serving +status: + conditions: + - status: "True" + type: DependenciesInstalled + - status: "True" + type: DeploymentsAvailable + - status: "True" + type: InstallSucceeded + - status: "True" + type: Ready + - status: "True" + type: VersionMigrationEligible diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving.yaml new file mode 100644 index 000000000..a57922ec1 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-knative-serving.yaml @@ -0,0 +1,12 @@ +apiVersion: operator.knative.dev/v1beta1 +kind: KnativeServing +metadata: + name: serverless + namespace: knative-serving +spec: + config: + tracing: + backend: "zipkin" + zipkin-endpoint: "http://serverless-collector-headless.chainsaw-otel-tempo-serverless.svc:9411/api/v2/spans" + debug: "false" + sample-rate: "0.1" diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector-assert.yaml new file mode 100644 index 000000000..a30c8787e --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector-assert.yaml @@ -0,0 +1,28 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: serverless-collector + namespace: chainsaw-otel-tempo-serverless +status: + availableReplicas: 1 + readyReplicas: 1 + replicas: 1 + +--- +apiVersion: v1 +kind: Service +metadata: + name: serverless-collector-headless + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - appProtocol: http + name: zipkin + port: 9411 + protocol: TCP + targetPort: 9411 + selector: + app.kubernetes.io/component: opentelemetry-collector + app.kubernetes.io/instance: chainsaw-otel-tempo-serverless.serverless + app.kubernetes.io/managed-by: opentelemetry-operator + app.kubernetes.io/part-of: opentelemetry \ No newline at end of file diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector.yaml new file mode 100644 index 000000000..cc1009c29 --- /dev/null +++ 
b/tests/e2e-openshift-serverless/otel-tempo-serverless/create-otel-collector.yaml
@@ -0,0 +1,23 @@
+apiVersion: opentelemetry.io/v1alpha1
+kind: OpenTelemetryCollector
+metadata:
+  name: serverless
+  namespace: chainsaw-otel-tempo-serverless
+spec:
+  mode: deployment
+  config: |
+    receivers:
+      zipkin:
+    processors:
+    exporters:
+      otlp:
+        endpoint: tempo-serverless-distributor:4317
+        tls:
+          insecure: true
+      debug:
+    service:
+      pipelines:
+        traces:
+          receivers: [zipkin]
+          processors: []
+          exporters: [debug, otlp]
diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces-assert.yaml
new file mode 100644
index 000000000..d145527ac
--- /dev/null
+++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces-assert.yaml
@@ -0,0 +1,7 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: generate-traces
+  namespace: chainsaw-otel-tempo-serverless
+status:
+  succeeded: 1
\ No newline at end of file
diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces.yaml
new file mode 100644
index 000000000..ffe2e8730
--- /dev/null
+++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/generate-traces.yaml
@@ -0,0 +1,37 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: generate-traces
+  namespace: chainsaw-otel-tempo-serverless
+spec:
+  template:
+    spec:
+      containers:
+      - name: generate-traces
+        image: ghcr.io/grafana/tempo-operator/test-utils:main
+        command:
+        - /bin/bash
+        - -c
+        args:
+        - |
+          # Initialize success counter
+          success_count=0
+
+          # Loop until we have 10 successful runs
+          while (( success_count < 10 )); do
+            # Probe the Knative service; a failed curl leaves $output empty
+            output=$(curl -s http://serverless-app.chainsaw-otel-tempo-serverless.svc || true)
+
+            # Check that the service returned the expected greeting
+            if [[ "$output" == 'Hello World! How about some tasty noodles?' ]]; then
+              # Increment the success counter
+              ((success_count++))
+              echo "Success: $success_count"
+            else
+              # If the probe failed, retry
+              echo "Command failed, retrying..."
+            fi
+          done
+
+          echo "Completed 10 successful runs."
+ restartPolicy: Never diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio-assert.yaml new file mode 100644 index 000000000..4cd01852b --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio-assert.yaml @@ -0,0 +1,7 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: minio + namespace: chainsaw-otel-tempo-serverless +status: + readyReplicas: 1 diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio.yaml new file mode 100644 index 000000000..38400dcc3 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-minio.yaml @@ -0,0 +1,79 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + app.kubernetes.io/name: minio + name: minio + namespace: chainsaw-otel-tempo-serverless +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 20Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: minio + namespace: chainsaw-otel-tempo-serverless +spec: + selector: + matchLabels: + app.kubernetes.io/name: minio + strategy: + type: Recreate + template: + metadata: + labels: + app.kubernetes.io/name: minio + spec: + containers: + - command: + - /bin/sh + - -c + - | + mkdir -p /storage/tempo && \ + minio server /storage + env: + - name: MINIO_ACCESS_KEY + value: tempo + - name: MINIO_SECRET_KEY + value: supersecret + image: minio/minio + name: minio + ports: + - containerPort: 9000 + volumeMounts: + - mountPath: /storage + name: storage + volumes: + - name: storage + persistentVolumeClaim: + claimName: minio +--- +apiVersion: v1 +kind: Service +metadata: + name: minio + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - port: 9000 + protocol: TCP + targetPort: 9000 + selector: + app.kubernetes.io/name: minio + type: ClusterIP +--- +apiVersion: v1 +kind: Secret +metadata: + name: minio + namespace: chainsaw-otel-tempo-serverless +stringData: + endpoint: http://minio.chainsaw-otel-tempo-serverless.svc.cluster.local:9000 + bucket: tempo + access_key_id: tempo + access_key_secret: supersecret +type: Opaque diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo-assert.yaml new file mode 100644 index 000000000..ac084204a --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo-assert.yaml @@ -0,0 +1,390 @@ +apiVersion: tempo.grafana.com/v1alpha1 +kind: TempoStack +metadata: + name: serverless + namespace: chainsaw-otel-tempo-serverless +# +# Service Accounts +# +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: tempo-serverless + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: serviceaccount + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +# +# Deployments +# +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: tempo-serverless-query-frontend + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: query-frontend + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +spec: + selector: + matchLabels: + app.kubernetes.io/component: query-frontend + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + 
app.kubernetes.io/name: tempo +status: + readyReplicas: 1 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: tempo-serverless-distributor + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: distributor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/component: distributor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +status: + readyReplicas: 1 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: tempo-serverless-querier + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: querier + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +spec: + selector: + matchLabels: + app.kubernetes.io/component: querier + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +status: + readyReplicas: 1 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: tempo-serverless-compactor + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: compactor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +spec: + selector: + matchLabels: + app.kubernetes.io/component: compactor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +status: + readyReplicas: 1 +# +# StatefulSets +# +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: tempo-serverless-ingester + namespace: chainsaw-otel-tempo-serverless + labels: + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +spec: + selector: + matchLabels: + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +status: + readyReplicas: 1 +# +# Services +# +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: compactor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-compactor + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - name: http-memberlist + port: 7946 + protocol: TCP + targetPort: http-memberlist + - name: http + port: 3200 + protocol: TCP + targetPort: http + selector: + app.kubernetes.io/component: compactor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: distributor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-distributor + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - name: otlp-grpc + port: 4317 + protocol: TCP + targetPort: otlp-grpc + - name: http + port: 3200 + protocol: TCP + targetPort: http + - name: otlp-http + port: 4318 + protocol: TCP + targetPort: otlp-http + - name: thrift-http + port: 14268 + protocol: TCP + targetPort: thrift-http + - name: thrift-compact + port: 6831 + protocol: UDP + targetPort: thrift-compact + - name: 
thrift-binary + port: 6832 + protocol: UDP + targetPort: thrift-binary + - name: jaeger-grpc + port: 14250 + protocol: TCP + targetPort: jaeger-grpc + - name: http-zipkin + port: 9411 + protocol: TCP + targetPort: http-zipkin + selector: + app.kubernetes.io/component: distributor + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: gossip-ring + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-gossip-ring + namespace: chainsaw-otel-tempo-serverless +spec: + clusterIP: None + publishNotReadyAddresses: true + ports: + - name: http-memberlist + port: 7946 + protocol: TCP + targetPort: http-memberlist + selector: + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + tempo-gossip-member: "true" +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-ingester + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - name: http + port: 3200 + protocol: TCP + targetPort: http + - name: grpc + port: 9095 + protocol: TCP + targetPort: grpc + selector: + app.kubernetes.io/component: ingester + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: querier + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-querier + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - name: http-memberlist + port: 7946 + protocol: TCP + targetPort: http-memberlist + - name: http + port: 3200 + protocol: TCP + targetPort: http + - name: grpc + port: 9095 + protocol: TCP + targetPort: grpc + selector: + app.kubernetes.io/component: querier + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: query-frontend + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-query-frontend + namespace: chainsaw-otel-tempo-serverless +spec: + ports: + - name: http + port: 3200 + protocol: TCP + targetPort: http + - name: grpc + port: 9095 + protocol: TCP + targetPort: grpc + - name: jaeger-grpc + port: 16685 + protocol: TCP + targetPort: jaeger-grpc + - name: jaeger-ui + port: 16686 + protocol: TCP + targetPort: jaeger-ui + - name: jaeger-metrics + port: 16687 + protocol: TCP + targetPort: jaeger-metrics + selector: + app.kubernetes.io/component: query-frontend + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo +--- +apiVersion: v1 +kind: Service +metadata: + labels: + app.kubernetes.io/component: query-frontend-discovery + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator + app.kubernetes.io/name: tempo + name: tempo-serverless-query-frontend-discovery + namespace: chainsaw-otel-tempo-serverless +spec: + clusterIP: None + 
publishNotReadyAddresses: true + ports: + - name: http + port: 3200 + protocol: TCP + targetPort: http + - name: grpc + port: 9095 + protocol: TCP + targetPort: grpc + - name: grpclb + port: 9096 + protocol: TCP + targetPort: grpclb + - name: jaeger-grpc + port: 16685 + protocol: TCP + targetPort: jaeger-grpc + - name: jaeger-ui + port: 16686 + protocol: TCP + targetPort: jaeger-ui + - name: jaeger-metrics + port: 16687 + protocol: TCP + targetPort: jaeger-metrics + selector: + app.kubernetes.io/component: query-frontend + app.kubernetes.io/instance: serverless + app.kubernetes.io/managed-by: tempo-operator +# +# Route +# +--- +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + name: tempo-serverless-query-frontend + namespace: chainsaw-otel-tempo-serverless +spec: + port: + targetPort: jaeger-ui + tls: + termination: edge + to: + kind: Service + name: tempo-serverless-query-frontend + weight: 100 + wildcardPolicy: None +status: + ingress: + - conditions: + - status: "True" + type: Admitted diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo.yaml new file mode 100644 index 000000000..d2873f917 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/install-tempo.yaml @@ -0,0 +1,24 @@ +apiVersion: tempo.grafana.com/v1alpha1 +kind: TempoStack +metadata: + name: serverless + namespace: chainsaw-otel-tempo-serverless +spec: + storage: + secret: + name: minio + type: s3 + storageSize: 200M + resources: + total: + limits: + memory: 2Gi + cpu: 2000m + template: + queryFrontend: + jaegerQuery: + enabled: true + ingress: + route: + termination: edge + type: route diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces-assert.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces-assert.yaml new file mode 100644 index 000000000..7ced823e4 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces-assert.yaml @@ -0,0 +1,9 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: verify-traces + namespace: chainsaw-otel-tempo-serverless +status: + conditions: + - status: "True" + type: Complete diff --git a/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces.yaml b/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces.yaml new file mode 100644 index 000000000..549466169 --- /dev/null +++ b/tests/e2e-openshift-serverless/otel-tempo-serverless/verify-traces.yaml @@ -0,0 +1,24 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: verify-traces + namespace: chainsaw-otel-tempo-serverless +spec: + template: + spec: + containers: + - name: verify-traces + image: ghcr.io/grafana/tempo-operator/test-utils:main + command: + - /bin/bash + - -eux + - -c + args: + - | + curl -v -G http://tempo-serverless-query-frontend:16686/api/traces --data-urlencode "service=activator-service" | tee /tmp/jaeger.out + num_traces=$(jq ".data | length" /tmp/jaeger.out) + if [[ "$num_traces" -le 0 ]]; then + echo && echo "The Jaeger API returned $num_traces" + exit 1 + fi + restartPolicy: Never \ No newline at end of file